// Last Notes
/// BIP-64MOD + GCC: Complete Git Empty & Genesis Constants
///
/// This module provides the standard cryptographic identifiers for "null",
/// "empty", and "genesis" states, including NIP-19 (Bech32) identities.
pub struct GitEmptyState;

impl GitEmptyState {
    // === NULL REFERENCE (Zero Hash) ===
    /// All-zero SHA-256 digest used as the null object reference.
    pub const NULL_SHA256: &'static str =
        "0000000000000000000000000000000000000000000000000000000000000000";

    // === EMPTY BLOB (Empty File) ===
    /// SHA-1 of the empty blob (classic Git object format).
    pub const BLOB_SHA1: &'static str = "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391";
    /// SHA-256 of the empty blob (SHA-256 object-format repositories).
    pub const BLOB_SHA256: &'static str =
        "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813";
    /// NIP-19 secret key associated with the empty-blob hash.
    pub const BLOB_NSEC: &'static str =
        "nsec1guaq7npmaz5ndqdzvl3mr6d8mndprp2rdls5ram5jys2xqmjrqfsdzhrp6";
    /// NIP-19 public key corresponding to `BLOB_NSEC`.
    pub const BLOB_NPUB: &'static str =
        "npub180cvv07tjdrghvkyh6964p7w9vsqpf3p05868v399v86p8y6f69sq5fdp0";

    // === EMPTY TREE (Empty Directory) ===
    /// SHA-1 of the empty tree object.
    pub const TREE_SHA1: &'static str = "4b825dc642cb6eb9a060e54bf8d69288fbee4904";
    /// SHA-256 of the empty tree object.
    pub const TREE_SHA256: &'static str =
        "6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321";
    /// NIP-19 secret key associated with the empty-tree hash.
    pub const TREE_NSEC: &'static str =
        "nsec1dmceksfzt3fknuwpqn29mrv9a75mq4a48v2tfwde88whfhkv2vsslsc46c";
    /// NIP-19 public key for the empty tree.
    /// NOTE(review): this is byte-identical to `GENESIS_NPUB` below while the
    /// nsec values differ — confirm that is intentional and not a paste error.
    pub const TREE_NPUB: &'static str =
        "npub1pxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq6erdfh";

    // === GENESIS COMMIT (DeepSpaceM1 @ Epoch 0) ===
    /// Result of: git commit --allow-empty -m 'Initial commit'
    /// With Author/Committer: DeepSpaceM1 <[email protected]> @ 1970-01-01T00:00:00Z
    pub const GENESIS_AUTHOR_NAME: &'static str = "DeepSpaceM1";
    // Fixed: the literal previously spanned two lines, embedding a leading
    // newline in the constant. It now matches the doc comment above.
    pub const GENESIS_AUTHOR_EMAIL: &'static str = "[email protected]";
    /// Unix timestamp of the genesis commit (the epoch itself).
    pub const GENESIS_DATE_UNIX: i64 = 0;
    /// Commit message used for the genesis commit.
    pub const GENESIS_MESSAGE: &'static str = "Initial commit";
    /// The resulting SHA-256 Commit Hash for this specific configuration
    pub const GENESIS_COMMIT_SHA256: &'static str =
        "e9768652d87e07663479a0ad402513f56d953930b659c2ef389d4d03d3623910";
    /// The NIP-19 Identity associated with the Genesis Commit
    pub const GENESIS_NSEC: &'static str =
        "nsec1jpxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq68at9d";
    /// NIP-19 public key for the genesis identity.
    pub const GENESIS_NPUB: &'static str =
        "npub1pxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq6erdfh";
}
/// Helper for constructing the commit object string for hashing
pub mod builders {
    use super::GitEmptyState;

    /// Assembles the raw Git commit object body for the genesis commit:
    /// a `tree` line, identical author/committer lines stamped at the epoch
    /// (+0000), a blank separator line, and the commit message.
    pub fn build_genesis_commit_object() -> String {
        let tree = GitEmptyState::TREE_SHA256;
        let name = GitEmptyState::GENESIS_AUTHOR_NAME;
        let email = GitEmptyState::GENESIS_AUTHOR_EMAIL;
        let ts = GitEmptyState::GENESIS_DATE_UNIX;
        let msg = GitEmptyState::GENESIS_MESSAGE;
        format!(
            "tree {tree}\nauthor {name} <{email}> {ts} +0000\ncommitter {name} <{email}> {ts} +0000\n\n{msg}\n"
        )
    }
}
/// Entry point: prints the genesis-state constants and the raw commit object.
fn main() {
    let commit = GitEmptyState::GENESIS_COMMIT_SHA256;
    let author = GitEmptyState::GENESIS_AUTHOR_NAME;
    let email = GitEmptyState::GENESIS_AUTHOR_EMAIL;
    println!("--- BIP-64MOD + GCC Genesis State ---");
    println!("Commit Hash: {commit}");
    println!("Author: {author} <{email}>");
    println!("Timestamp: {}", GitEmptyState::GENESIS_DATE_UNIX);
    println!("NSEC: {}", GitEmptyState::GENESIS_NSEC);
    // Reconstruct the raw commit object so it can be hashed/inspected.
    let raw = builders::build_genesis_commit_object();
    println!("\nRaw Git Commit Object:\n---\n{raw}---");
}
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use rand::thread_rng;
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
/// Example: 2-of-3 FROST threshold signing, all roles simulated locally.
/// Flow: dealer keygen -> Round 1 (commitments) -> Round 2 (signature
/// shares) -> aggregation -> verification against the group key.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
// NOTE(review): thread_rng is fine for a demo; a production dealer would want
// an auditable entropy source.
let mut rng = thread_rng();
let max_signers = 3;
let min_signers = 2;
////////////////////////////////////////////////////////////////////////////
// Round 0: Key Generation (Trusted Dealer)
////////////////////////////////////////////////////////////////////////////
// In a real P2P setup, you'd use Distributed Key Generation (DKG).
// For local testing/simulations, the trusted dealer is faster.
let (shares, pubkey_package) = frost::keys::generate_with_dealer(
max_signers,
min_signers,
frost::keys::IdentifierList::Default,
&mut rng,
)?;
// Verifying the public key exists
let group_public_key = pubkey_package.verifying_key();
println!("Group Public Key: {:?}", group_public_key);
////////////////////////////////////////////////////////////////////////////
// Round 1: Commitment
////////////////////////////////////////////////////////////////////////////
let message = b"BIP-64MOD Consensus Proposal";
let mut signing_commitments = BTreeMap::new();
let mut participant_nonces = BTreeMap::new();
// Participants 1 and 2 decide to sign
for i in 1..=min_signers {
let identifier = frost::Identifier::try_from(i as u16)?;
// Generate nonces and commitments
let (nonces, commitments) = frost::round1::commit(
shares[&identifier].signing_share(),
&mut rng,
);
// Commitments are public (go to the coordinator); nonces stay secret.
signing_commitments.insert(identifier, commitments);
participant_nonces.insert(identifier, nonces);
}
////////////////////////////////////////////////////////////////////////////
// Round 2: Signing
////////////////////////////////////////////////////////////////////////////
let mut signature_shares = BTreeMap::new();
// The signing package binds the collected commitments to the message.
let signing_package = frost::SigningPackage::new(signing_commitments, message);
for i in 1..=min_signers {
let identifier = frost::Identifier::try_from(i as u16)?;
let nonces = &participant_nonces[&identifier];
// Each participant produces a signature share
let key_package: frost::keys::KeyPackage = shares[&identifier].clone().try_into()?;
let share = frost::round2::sign(&signing_package, nonces, &key_package)?;
signature_shares.insert(identifier, share);
}
////////////////////////////////////////////////////////////////////////////
// Finalization: Aggregation
////////////////////////////////////////////////////////////////////////////
let group_signature = frost::aggregate(
&signing_package,
&signature_shares,
&pubkey_package,
)?;
// Verification
group_public_key.verify(message, &group_signature)?;
println!("Threshold signature verified successfully!");
Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point: the FROST demo is gated behind the `nostr` feature.
    let hint = "This example requires the 'nostr' feature. Please run with: cargo run --example trusted-dealer --features nostr";
    println!("{hint}");
}
[workspace]
members = [".", "src/get_file_hash_core", "n34", "n34-relay"]
[workspace.package]
version = "0.4.7"
edition = "2024"
license = "MIT"
authors = ["gnostr <[email protected]>"]
documentation = "https://github.com/gnostr-org/get_file_hash#readme"
homepage = "https://github.com/gnostr-org/get_file_hash"
repository = "https://github.com/gnostr-org/get_file_hash"
description = "A utility crate providing a procedural macro to compute and embed file hashes at compile time."
[package]
name = "get_file_hash"
version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
homepage.workspace = true
authors.workspace = true
license.workspace = true
[package.metadata.wix]
upgrade-guid = "DED69220-26E3-4406-B564-7F2B58C56F57"
path-guid = "8DB39A25-8B99-4C25-8CF5-4704353C7C6E"
license = false
eula = false
[features]
nostr = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
frost = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
gen-protos = []
[workspace.dependencies]
get_file_hash_core = { features = ["nostr"], path = "src/get_file_hash_core", version = "0.4.7" }
rand_chacha = "0.3"
sha2 = "0.11.0"
nostr = { version = "0.44.2", features = ["std", "nip46"] }
nostr-sdk = { version = "0.44.0", default-features = false, features = ["default"] }
hex = "0.4.2"
tokio = "1"
serde_json = "1.0"
csv = { version = "1.3.0", default-features = false }
url = "2.5.0"
reqwest = { version = "0.12.0", default-features = false }
tempfile = "3.27.0"
rand = "0.8"
frost-secp256k1-tr = "3.0.0-rc.0"
serial_test = { version = "3.4.0", features = ["test_logging"] }
log = "0.4"
n34 = { version = "0.4.0", path = "n34" }
n34-relay = { version = "0.1.1", path = "n34-relay" }
chrono = "0.4.41"
convert_case = "0.8.0"
dirs = "6.0.0"
easy-ext = "1.0.2"
either = "1.15.0"
futures = "0.3.31"
nostr-browser-signer-proxy = "0.43.0"
regex = "1.11.1"
thiserror = "2.0.12"
toml = "0.9.4"
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
[dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
nostr = { workspace = true, optional = true }
nostr-sdk = { workspace = true, optional = true }
hex = { workspace = true, optional = true }
tokio = { workspace = true, features = ["full"] }
frost-secp256k1-tr = { workspace = true }
rand = { workspace = true }
serde_json = { workspace = true }
rand_chacha = { workspace = true }
n34 = { workspace = true }
n34-relay = { workspace = true }
axum = { version = "0.8.6", features = ["http2", "ws"] }
base64 = "0.22.1"
chrono = "0.4.42"
config = { version = "0.15.15", default-features = false, features = ["toml"] }
const_format = "0.2.34"
convert_case = "0.8.0"
easy-ext = "1.0.2"
either = "1.15.0"
flume = "0.11.1"
futures = "0.3.31"
hyper = "1.7.0"
hyper-util = "0.1.17"
parking_lot = { version = "0.12.5", features = ["serde"] }
prost = "0.14.1"
serde = { version = "1.0.219", features = ["rc"] }
#serde_json = "1.0.145"
serde_with = "3.15.0"
sha1 = "0.10.6"
#sha2 = "0.10.9"
strum = { version = "0.27.2", features = ["derive"] }
thiserror = "2.0.16"
tokio-util = { version = "0.7.17", features = ["io"] }
toml = "0.9.5"
tonic-prost = "0.14.2"
tower = { version = "0.5.2", features = ["limit"] }
#tracing = "0.1.41"
#tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
dirs = "6.0.0"
rhai = { version = "1.23.4", features = [
"no_position",
"sync",
"serde",
"decimal",
] }
##tokio = { version = "1.47.1", features = [
## "macros",
## "rt-multi-thread",
## "signal",
## "fs",
## "process",
##] }
tonic = { version = "0.14.2", features = [
"tls-ring",
"tls-webpki-roots",
"gzip",
"deflate",
] }
tower-http = { version = "0.6.6", features = [
"cors",
"decompression-br",
"decompression-deflate",
"decompression-gzip",
"decompression-zstd",
"trace",
"timeout",
] }
[dependencies.clap]
features = ["derive"]
version = "4.5.42"
[dependencies.clap-verbosity-flag]
default-features = false
features = ["tracing"]
version = "3.0.3"
# We frequently switch between stable and unstable versions; this will make the
# process easier.
## [dependencies.nostr]
## default-features = false
## features = ["std"]
## git = "https://git.4rs.nl/mirrors/nostr.git"
## rev = "27a1947d3"
## # version = "0.45.0"
[dependencies.nostr-database]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-lmdb]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-relay-builder]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[build-dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
nostr = { workspace = true }
nostr-sdk = { workspace = true }
hex = { workspace = true }
tonic-prost-build = "0.14.2"
[target.'cfg(not(windows))'.build-dependencies]
protobuf-src = "2.1.0"
# The profile that 'dist' will build with
[profile.dist]
inherits = "release"
lto = "thin"
[dev-dependencies]
serial_test = { workspace = true }
[[example]]
name = "gnostr-build"
path = "examples/gnostr-build.rs"
required-features = ["nostr"]
use rand_chacha::ChaCha20Rng;
use rand_chacha::rand_core::SeedableRng;
use frost_secp256k1_tr as frost;
use frost::{Identifier, keys::IdentifierList, round1, round2};
use std::collections::BTreeMap;
/// Example: fully deterministic 2-of-3 signing session built from fixed
/// ChaCha20 seeds, constructing KeyPackages by hand instead of `try_into`.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
// 1. Dealer Setup
// Fixed all-zero seed: every run produces the same shares (demo only —
// not safe for real key material).
let mut dealer_rng = ChaCha20Rng::from_seed([0u8; 32]);
let min_signers = 2;
let (shares, pubkey_package) = frost::keys::generate_with_dealer(
3, min_signers, IdentifierList::Default, &mut dealer_rng
)?;
// 2. Setup Participant Identifiers
let p1_id = Identifier::try_from(1u16)?;
let p2_id = Identifier::try_from(2u16)?;
// 3. Construct KeyPackages manually for RC.0
// NOTE(review): assumes `VerifyingShare::from(SigningShare)` derives the
// public share from the secret scalar — confirm against the crate docs.
let p1_verifying_share = frost::keys::VerifyingShare::from(*shares[&p1_id].signing_share());
let p1_key_package = frost::keys::KeyPackage::new(
p1_id,
*shares[&p1_id].signing_share(),
p1_verifying_share,
*pubkey_package.verifying_key(),
min_signers,
);
let p2_verifying_share = frost::keys::VerifyingShare::from(*shares[&p2_id].signing_share());
let p2_key_package = frost::keys::KeyPackage::new(
p2_id,
*shares[&p2_id].signing_share(),
p2_verifying_share,
*pubkey_package.verifying_key(),
min_signers,
);
// 4. Round 1: Commitments
// Separate fixed-seed RNGs per participant keep the nonces deterministic.
let mut rng1 = ChaCha20Rng::from_seed([1u8; 32]);
let (p1_nonces, p1_commitments) = round1::commit(p1_key_package.signing_share(), &mut rng1);
let mut rng2 = ChaCha20Rng::from_seed([2u8; 32]);
let (p2_nonces, p2_commitments) = round1::commit(p2_key_package.signing_share(), &mut rng2);
// 5. Coordinator: Signing Package
let message = b"gnostr-commit-7445bd727dbce5bac004861a45c35ccd4f4a195bfb1cc39f2a7c9fd3aa3b6547";
let mut commitments_map = BTreeMap::new();
commitments_map.insert(p1_id, p1_commitments);
commitments_map.insert(p2_id, p2_commitments);
let signing_package = frost::SigningPackage::new(commitments_map, message);
// 6. Round 2: Partial Signatures
let p1_signature_share = round2::sign(&signing_package, &p1_nonces, &p1_key_package)?;
let p2_signature_share = round2::sign(&signing_package, &p2_nonces, &p2_key_package)?;
// 7. Aggregation
let mut signature_shares = BTreeMap::new();
signature_shares.insert(p1_id, p1_signature_share);
signature_shares.insert(p2_id, p2_signature_share);
let group_signature = frost::aggregate(&signing_package, &signature_shares, &pubkey_package)?;
println!("--- BIP-64MOD Aggregated Signature ---");
println!("Final Signature (Hex): {}", hex::encode(group_signature.serialize()?));
// Final Verification
pubkey_package.verifying_key().verify(message, &group_signature)?;
println!("🛡️ Signature is valid for the 2nd generation group!");
Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Feature-gated stub: the real demo requires `--features nostr`.
    let msg = "Run with --features nostr";
    println!("{msg}");
}
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand_chacha::rand_core::SeedableRng;
#[cfg(feature = "nostr")]
use hex;
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
/// Example: deterministic trusted-dealer keygen seeded from the empty-blob
/// SHA-256, then verification of each share against its VSS commitment.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
// 1. Create a deterministic seed (e.g., 32 bytes of zeros or a Git Hash)
// This is the empty-blob SHA-256 (see GitEmptyState::BLOB_SHA256).
let seed_hex = "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813";
let seed_bytes = hex::decode(seed_hex)?;
// try_into: Vec<u8> -> [u8; 32]; fails if the hex is not exactly 32 bytes.
let mut rng = ChaCha20Rng::from_seed(seed_bytes.try_into().map_err(|_| "Invalid seed length")?);
let max_signers = 3;
let min_signers = 2;
////////////////////////////////////////////////////////////////////////////
// Round 0: Key Generation (Trusted Dealer)
////////////////////////////////////////////////////////////////////////////
// Using IdentifierList::Default creates identifiers 1, 2, 3...
let (shares, pubkey_package) = frost::keys::generate_with_dealer(
max_signers,
min_signers,
IdentifierList::Default,
&mut rng,
)?;
println!("--- Deterministic FROST Dealer ---");
println!("Threshold: {} of {}", min_signers, max_signers);
println!("Number of shares generated: {}", shares.len());
println!("\n--- Verifying Shares Against Commitments ---");
for (identifier, share) in &shares {
// The Deterministic Values (Scalar Hex)
// Because your seed is fixed to the EMPTY_BLOB_SHA256,
// the "redacted" values in your output are always the same.
// Here are the Secret Signing Shares (the private scalars) for your 2-of-3 setup:
//
// Participant,Identifier (x),Signing Share (f(x)) in Hex
// Participant 1,...0001,757f49553754988450d995c65a0459a0f5a703d7c585f95f468202d09a365f57
// Participant 2,...0002,a3c4835e32308cb11b43968962290bc9171f1f1ca90c21741890e4f326f9879b
// Participant 3,...0003,d209bd672d0c80dd65ad974c6a4dc1f138973a618c924988eaaa0715b3bcafdf
//
// println!("Participant Identifier: {:?} {:?}", identifier, share);
//
// In FROST, the 'verify' method checks the share against the VSS commitment
match share.verify() {
Ok(_) => {
println!("Participant {:?}: Valid ✅", identifier);
}
Err(e) => {
println!("Participant {:?}: INVALID! ❌ Error: {:?}", identifier, e);
}
}
}
// Compressed SEC1 key: first byte is the parity tag, remainder is the
// 32-byte x coordinate (hence the [1..] slice for the x-only form below).
let pubkey_bytes = pubkey_package.verifying_key().serialize()?;
println!("Group Public Key (Hex Compressed): {}", hex::encode(&pubkey_bytes));
let x_only_hex = hex::encode(&pubkey_bytes[1..]);
println!("Group Public Key (Hex X-Only): {}", x_only_hex);
Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub used when the `nostr` feature is disabled.
    let msg = "Run with --features nostr to enable this example.";
    println!("{msg}");
}
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Thin wrapper: the whole simulation lives in the core crate.
    let outcome = get_file_hash_core::frost_mailbox_logic::simulate_frost_mailbox_post_signer();
    outcome
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point shown when the `nostr` feature is not enabled.
    let hint = "This example requires the 'nostr' feature. Please run with: cargo run --example frost_mailbox_post --features nostr";
    println!("{hint}");
}
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use rand::thread_rng;
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
/// A simplified ROAST Coordinator that manages signing sessions
///
/// Collects Round-1 commitments and Round-2 signature shares until the
/// signing threshold (`min_signers`) is reached; extra submissions beyond
/// the threshold are ignored by the impl below.
#[cfg(feature = "nostr")]
struct RoastCoordinator {
// Signing threshold: minimum number of participants required.
min_signers: u16,
// Message under signature (stored at construction; unused afterwards here).
_message: Vec<u8>,
// Round-1 public commitments, keyed by participant identifier.
commitments: BTreeMap<frost::Identifier, frost::round1::SigningCommitments>,
// Secret nonces — held here only because this simulation plays both the
// coordinator and signer roles; real signers never share these.
nonces: BTreeMap<frost::Identifier, frost::round1::SigningNonces>,
// Round-2 signature shares, keyed by participant identifier.
shares: BTreeMap<frost::Identifier, frost::round2::SignatureShare>,
}
#[cfg(feature = "nostr")]
impl RoastCoordinator {
    /// Creates a coordinator for a session over `message` that requires
    /// `min_signers` participants before signing can proceed.
    fn new(min_signers: u16, message: &[u8]) -> Self {
        Self {
            min_signers,
            _message: message.to_vec(),
            commitments: BTreeMap::new(),
            nonces: BTreeMap::new(),
            shares: BTreeMap::new(),
        }
    }

    /// The signing threshold expressed as a collection size.
    fn threshold(&self) -> usize {
        self.min_signers as usize
    }

    /// ROAST Logic: Collect commitments until we hit the threshold.
    /// In a real P2P system, this would be an async stream handler.
    fn add_commitment(&mut self, id: frost::Identifier, comms: frost::round1::SigningCommitments, nonces: frost::round1::SigningNonces) {
        if self.commitments.len() >= self.threshold() {
            return; // threshold reached — late commitments are dropped
        }
        self.commitments.insert(id, comms);
        self.nonces.insert(id, nonces);
    }

    /// ROAST Logic: Collect signature shares.
    fn add_share(&mut self, id: frost::Identifier, share: frost::round2::SignatureShare) {
        if self.shares.len() >= self.threshold() {
            return; // threshold reached — late shares are dropped
        }
        self.shares.insert(id, share);
    }

    /// True once enough commitments are in to build a signing package.
    fn is_ready_to_sign(&self) -> bool {
        self.commitments.len() >= self.threshold()
    }

    /// True once enough signature shares are in to aggregate.
    fn is_ready_to_aggregate(&self) -> bool {
        self.shares.len() >= self.threshold()
    }
}
/// Example: ROAST-style asynchronous coordination over a 3-of-5 FROST group,
/// where only the first `min_signers` responders participate.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut rng = thread_rng();
let (max_signers, min_signers) = (5, 3);
let message = b"BIP-64MOD Context: ROAST Coordination";
// 1. Setup: Generate keys (Dealer mode for simulation)
let (key_shares, pubkey_package) = frost::keys::generate_with_dealer(
max_signers,
min_signers,
frost::keys::IdentifierList::Default,
&mut rng,
)?;
let mut coordinator = RoastCoordinator::new(min_signers, message);
// 2. Round 1: Asynchronous Commitment Collection
// Simulate signers 1, 3, and 5 responding first (ROAST skips 2 and 4)
for &id_num in &[1, 3, 5] {
let id = frost::Identifier::try_from(id_num as u16)?;
let (nonces, comms) = frost::round1::commit(key_shares[&id].signing_share(), &mut rng);
// Signers store their nonces locally, send comms to coordinator
coordinator.add_commitment(id, comms, nonces);
// Note: Signer 2 was "offline", but ROAST doesn't care because we hit 3/5.
}
// 3. Round 2: Signing
if coordinator.is_ready_to_sign() {
let signing_package = frost::SigningPackage::new(coordinator.commitments.clone(), message);
let mut temp_shares = BTreeMap::new();
for &id in coordinator.commitments.keys() {
// In reality, coordinator sends signing_package to signers
// Here we simulate the signers producing shares
let nonces = &coordinator.nonces[&id];
let key_package: frost::keys::KeyPackage = key_shares[&id].clone().try_into()?;
let share = frost::round2::sign(&signing_package, &nonces, &key_package)?;
temp_shares.insert(id, share);
}
// Collected in a temp map first because add_share needs &mut coordinator,
// which can't coexist with the iteration borrow above.
for (id, share) in temp_shares {
coordinator.add_share(id, share);
}
}
// 4. Finalization: Aggregation
if coordinator.is_ready_to_aggregate() {
// Rebuilt from the same commitments, so it matches the Round-2 package.
let signing_package = frost::SigningPackage::new(coordinator.commitments.clone(), message);
let group_signature = frost::aggregate(
&signing_package,
&coordinator.shares,
&pubkey_package,
)?;
pubkey_package.verifying_key().verify(message, &group_signature)?;
println!("ROAST-coordinated signature verified!");
}
Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point shown when the `nostr` feature is not enabled.
    let hint = "This example requires the 'nostr' feature. Please run with: cargo run --example roast-experiment --features nostr";
    println!("{hint}");
}
/// Usage: cargo run --example cli-parser --features nostr
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub used when the `nostr` feature is disabled.
    let msg = "Run with --features nostr to enable this example.";
    println!("{msg}");
}
#[cfg(feature = "nostr")]
use clap::{Parser, Subcommand};
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::round1::{self, SigningCommitments, SigningNonces};
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand::SeedableRng;
#[cfg(feature = "nostr")]
use std::fs;
#[cfg(feature = "nostr")]
use std::path::PathBuf;
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
// Command-line interface for the gnostr-frost threshold-signature tool.
// A single #[cfg] gate suffices: when the `nostr` feature is disabled the
// item is removed together with all of its attributes, so the previously
// repeated #[cfg(feature = "nostr")] before every attribute was redundant.
// (Field comments use `//`, not `///`, so clap help output is unchanged.)
#[cfg(feature = "nostr")]
#[derive(Parser)]
#[command(name = "gnostr-frost")]
#[command(version = "0.1.0")]
#[command(about = "BIP-64MOD + GCC Threshold Signature Tool", long_about = None)]
struct Cli {
    // Which workflow step to execute.
    #[command(subcommand)]
    command: Commands,
}
// Workflow subcommands, ordered by the signing ceremony's steps.
// As with `Cli`, one #[cfg] gate replaces the duplicated per-attribute gates.
// Internal comments use `//` so clap's doc-comment-derived help is unchanged.
#[cfg(feature = "nostr")]
#[derive(Subcommand)]
enum Commands {
    /// Step 1: Generate a new T-of-N key set (Dealer Mode)
    Keygen {
        // Minimum number of signers (T).
        #[arg(long, default_value_t = 2)]
        threshold: u16,
        // Total number of shares to deal (N).
        #[arg(long, default_value_t = 3)]
        total: u16,
        // Output directory for key files (defaults to the current directory).
        #[arg(short, long)]
        output_dir: Option<PathBuf>,
    },
    /// Step 2: Generate a batch of public/private nonces
    Batch {
        #[arg(short, long, default_value_t = 10)]
        count: u16,
        // Path to this participant's key-package JSON.
        #[arg(short, long)]
        key: PathBuf,
    },
    /// Step 3: Sign a message hash using a vaulted nonce index
    Sign {
        #[arg(short, long)]
        message: String,
        // Index into the nonce vault; the nonce is consumed on use.
        #[arg(short, long)]
        index: u64,
        #[arg(short, long)]
        key: PathBuf,
        #[arg(short, long)]
        vault: PathBuf,
    },
    /// Step 4: Aggregate shares into a final BIP-340 signature
    Aggregate {
        #[arg(short, long)]
        message: String,
        // Paths to the participants' signature-share JSON files.
        #[arg(required = true)]
        shares: Vec<String>,
    },
    /// Step 5: Verify a BIP-340 signature against the group public key
    Verify {
        #[arg(short, long)]
        message: String,
        // Hex-encoded BIP-340 signature.
        #[arg(short, long)]
        signature: String,
        #[arg(short, long)]
        public_key: PathBuf,
    },
}
// Vault of secret signing nonces, keyed by batch index.
#[cfg(feature = "nostr")]
type NonceMap = BTreeMap<u32, SigningNonces>;
// Public commitments matching the vaulted nonces, keyed by batch index.
#[cfg(feature = "nostr")]
type CommitmentMap = BTreeMap<u32, SigningCommitments>;
/// CLI entry point: dispatches the five ceremony steps (keygen, batch,
/// sign, aggregate, verify) over JSON files in the working directory.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
let cli = Cli::parse();
match &cli.command {
Commands::Keygen { threshold, total, output_dir } => {
println!("🛠️ Executing Keygen: {}-of-{}...", threshold, total);
let mut rng = ChaCha20Rng::from_entropy();
let (shares, pubkey_package) = frost::keys::generate_with_dealer(
*total, *threshold, IdentifierList::Default, &mut rng
)?;
let path = output_dir.as_deref().unwrap_or(std::path::Path::new("."));
let pub_path = path.join("group_public.json");
fs::write(&pub_path, serde_json::to_string_pretty(&pubkey_package)?)?;
println!("✅ Saved Group Public Key to {:?}", pub_path);
// One key-package file per participant: p<id_hex>_key.json
for (id, share) in shares {
let key_pkg = frost::keys::KeyPackage::new(
id,
*share.signing_share(),
frost::keys::VerifyingShare::from(*share.signing_share()),
*pubkey_package.verifying_key(),
*threshold,
);
let id_hex = hex::encode(id.serialize());
let file_name = format!("p{}_key.json", id_hex);
fs::write(path.join(file_name), serde_json::to_string_pretty(&key_pkg)?)?;
}
}
Commands::Batch { count, key } => {
println!("📦 Executing Batch...");
let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
let mut rng = ChaCha20Rng::from_entropy();
let mut public_commitments = CommitmentMap::new();
let mut secret_nonce_vault = NonceMap::new();
// Pre-generate `count` nonce/commitment pairs, indexed 0..count.
for i in 0..*count {
let (nonces, commitments) = round1::commit(key_pkg.signing_share(), &mut rng);
public_commitments.insert(i as u32, commitments);
secret_nonce_vault.insert(i as u32, nonces);
}
let id_hex = hex::encode(key_pkg.identifier().serialize());
fs::write(format!("p{}_vault.json", id_hex), serde_json::to_string(&secret_nonce_vault)?)?;
fs::write(format!("p{}_public_comms.json", id_hex), serde_json::to_string(&public_commitments)?)?;
println!("✅ Nonces and Commitments saved for ID {}", id_hex);
}
Commands::Sign { message, index, key, vault } => {
println!("✍️ Executing Sign: Index #{}...", index);
let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
let mut vault_data: NonceMap = serde_json::from_str(&fs::read_to_string(vault)?)?;
let signing_nonces = vault_data.remove(&(*index as u32)).ok_or("Nonce not found!")?;
// Persist the vault immediately after removal so the nonce can never be
// reused, even if signing below fails.
fs::write(vault, serde_json::to_string(&vault_data)?)?;
let mut commitments_map = BTreeMap::new();
commitments_map.insert(*key_pkg.identifier(), *signing_nonces.commitments());
// Discovery logic for peers
// Scans CWD for p<id_hex>_public_comms.json files from other signers.
for entry in fs::read_dir(".")? {
let path = entry?.path();
let fname = path.file_name().unwrap().to_str().unwrap();
if fname.starts_with('p') && fname.contains("_public_comms.json") {
let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_public_comms.json").unwrap();
let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
if peer_id != *key_pkg.identifier() {
let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(&path)?)?;
if let Some(c) = peer_comms.get(&(*index as u32)) {
commitments_map.insert(peer_id, *c);
}
}
}
}
let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
let share = frost::round2::sign(&signing_package, &signing_nonces, &key_pkg)?;
let share_file = format!("p{}_share.json", hex::encode(key_pkg.identifier().serialize()));
fs::write(&share_file, serde_json::to_string(&share)?)?;
println!("✅ Share saved to {}", share_file);
}
Commands::Aggregate { message, shares } => {
println!("🧬 Executing Aggregate...");
let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string("group_public.json")?)?;
let mut commitments_map = BTreeMap::new();
let mut signature_shares = BTreeMap::new();
for share_path in shares {
let share: frost::round2::SignatureShare = serde_json::from_str(&fs::read_to_string(share_path)?)?;
let fname = std::path::Path::new(share_path).file_name().unwrap().to_str().unwrap();
let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_share.json").unwrap();
let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
let comms_file = format!("p{}_public_comms.json", id_hex);
let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(comms_file)?)?;
// NOTE(review): aggregation hard-codes nonce index 0 while Sign accepts
// an arbitrary --index; aggregating shares signed with index != 0 would
// pair the wrong commitments — confirm this is intended.
commitments_map.insert(peer_id, *peer_comms.get(&0).unwrap());
signature_shares.insert(peer_id, share);
}
let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
let group_sig = frost::aggregate(&signing_package, &signature_shares, &pubkey_package)?;
let sig_hex = hex::encode(group_sig.serialize()?);
println!("✅ Aggregation Successful!\nFinal BIP-340 Signature: {}", sig_hex);
fs::write("final_signature.json", serde_json::to_string(&group_sig)?)?;
}
Commands::Verify { message, signature, public_key } => {
println!("🔍 Executing Verify...");
let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string(public_key)?)?;
let sig_bytes = hex::decode(signature)?;
let group_sig = frost::Signature::deserialize(&sig_bytes)?;
match pubkey_package.verifying_key().verify(message.as_bytes(), &group_sig) {
Ok(_) => println!("✅ SUCCESS: The signature is VALID!"),
Err(_) => println!("❌ FAILURE: Invalid signature."),
}
}
}
Ok(())
}
# `build.rs` Documentation
This document explains the functionality of the `build.rs` script in this project. The `build.rs` script is a special Rust file that, if present, Cargo will compile and run *before* compiling the rest of your package. It's typically used for tasks that need to be performed during the build process, such as generating code, setting environment variables, or performing conditional compilation.
## Core Functionality
The `build.rs` script in this project performs the following key functions:
1. **Environment Variable Injection:** It computes various project-related values at compile time and injects them as environment variables (via `cargo:rustc-env=VAR=VALUE` directives printed to stdout) that can be accessed by the main crate using `env!("VAR_NAME")`. This includes:
* `CARGO_PKG_NAME`: The name of the current package (from `Cargo.toml`).
* `CARGO_PKG_VERSION`: The version of the current package (from `Cargo.toml`).
* `GIT_COMMIT_HASH`: The full commit hash of the current Git HEAD (if in a Git repository).
* `GIT_BRANCH`: The name of the current Git branch (if in a Git repository).
* `CARGO_TOML_HASH`: The SHA-256 hash of the `Cargo.toml` file.
* `LIB_HASH`: The SHA-256 hash of the `src/lib.rs` file.
* `BUILD_HASH`: The SHA-256 hash of the `build.rs` file itself.
2. **Rerun Conditions:** It tells Cargo when to re-run the build script. This ensures that the injected environment variables and any conditional compilation logic are up-to-date if relevant files change:
* `Cargo.toml`
* `src/lib.rs`
* `build.rs`
* `.git/HEAD` (to detect changes in the Git repository like new commits or branch switches).
* `src/get_file_hash_core/src/online_relays_gps.csv` (conditionally, if the file exists).
3. **Conditional Nostr Event Publishing (Release Builds with `nostr` feature):**
If the project is being compiled in **release mode (`--release`)** and the **`nostr` feature is enabled (`--features nostr`)**, the `build.rs` script will connect to Nostr relays and publish events. This is intended for "deterministic Nostr event build examples" as indicated by the comments in the file.
* **Relay Management:** It retrieves a list of default relay URLs. During event publishing, it identifies and removes "unfriendly" or unresponsive relays (e.g., those with timeout, connection issues, or spam blocks) from the list for subsequent publications.
* **File Hashing and Key Generation:** For each Git-tracked file (when in a Git repository), it computes its SHA-256 hash. This hash is then used to derive a Nostr `SecretKey`.
* **Event Creation:**
* **Individual File Events:** For each Git-tracked file, a Nostr `text_note` event is created. This event includes tags for:
* `#file`: The path of the file.
* `#version`: The package version.
* `#commit`: The Git commit hash (if in a Git repository).
* `#branch`: The Git branch name (if in a Git repository).
* **Metadata Event:** It publishes a metadata event using `get_file_hash_core::publish_metadata_event`.
* **Linking Event (Build Manifest):** After processing all individual files, if any events were published, a final "build manifest" `text_note` event is created. This event links to all the individual file events that were published during the build using event tags.
* **Output Storage:** The JSON representation of successfully published Nostr events (specifically the `EventId`) is saved to `~/.gnostr/build/{package_version}/{file_path_str_sanitized}/{hash}/{public_key}/{event_id}.json`. This provides a local record of what was published.
### `publish_nostr_event_if_release` Function
This asynchronous helper function is responsible for:
* Adding relays to the Nostr client.
* Connecting to relays.
* Signing the provided `EventBuilder` to create an `Event`.
* Sending the event to the configured relays.
* Logging success or failure for each relay.
* Identifying and removing unresponsive relays from the `relay_urls` list.
* Saving the published event's JSON to the local filesystem.
### `should_remove_relay` Function
This helper function determines if a relay should be considered "unfriendly" or unresponsive based on common error messages received during Nostr event publication.
## Usage
To prevent 'Too many open files' errors, especially during builds and tests involving numerous file operations or subprocesses (like `git ls-files` or parallel test execution), it may be necessary to increase the file descriptor limit.
* **For local development**: Run `ulimit -n 4096` in your terminal session before executing `cargo build` or `cargo test`. This setting is session-specific.
* **For CI environments**: The `.github/workflows/rust.yml` workflow is configured to set `ulimit -n 4096` for relevant test steps to ensure consistent execution.
The values set by `build.rs` can be accessed in your Rust code (e.g., `src/lib.rs`) at compile time using the `env!` macro. For example:
```rust
pub const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
```
The Nostr event publishing functionality of `build.rs` is primarily for release builds with the `nostr` feature enabled, allowing for the automatic, deterministic publication of project state to the Nostr network as part of the CI/CD pipeline.
## Example Commands
To interact with the `build.rs` script's features, especially those related to Nostr event publishing, you can use the following `cargo` commands:
* **Build in release mode with Nostr feature (verbose output):**
```bash
cargo build --release --workspace --features nostr -vv
```
* **Run tests for `get_file_hash_core` sequentially with Nostr feature and verbose logging (as in CI):**
```bash
RUST_LOG=info,nostr_sdk=debug,frost=debug cargo test -p get_file_hash_core --features nostr -- --test-threads 1 --nocapture
```
* **Run all workspace tests in release mode with Nostr feature:**
```bash
cargo test --workspace --release --features nostr
```
* **Build `get_file_hash_core` in release mode with Nostr feature (very verbose output):**
```bash
cargo build --release --features nostr -vv -p get_file_hash_core
```
* **Run `get_file_hash_core` tests in release mode with Nostr feature (very verbose output):**
```bash
cargo test --release --features nostr -vv -p get_file_hash_core
```
// FIX: gate the feature-dependent imports behind `#[cfg(feature = "nostr")]`,
// matching the convention used by the other FROST examples in this repo.
// Ungated, they break (or at best warn in) builds without the feature.
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand_chacha::rand_core::SeedableRng;
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::{Identifier, keys::IdentifierList};

/// FROST Round 1 demo: deterministically deal 2-of-3 shares, then generate
/// Participant 1's secret nonces and the public commitments to be broadcast.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 1. We need the dealer setup first to get a real SigningShare.
    //    A fixed seed makes the dealt shares reproducible across runs.
    let dealer_seed = [0u8; 32];
    let mut dealer_rng = ChaCha20Rng::from_seed(dealer_seed);
    let (shares, _pubkey_package) = frost::keys::generate_with_dealer(
        3, 2, IdentifierList::Default, &mut dealer_rng
    )?;
    // 2. Setup nonce RNG (separate seed so nonces are independent of dealing)
    let nonce_seed = [1u8; 32];
    let mut rng = ChaCha20Rng::from_seed(nonce_seed);
    // 3. Get Participant 1's share
    let p1_id = Identifier::try_from(1u16)?;
    let p1_share = shares.get(&p1_id).ok_or("Share not found")?;
    ////////////////////////////////////////////////////////////////////////////
    // Round 1: Commitments & Nonces
    ////////////////////////////////////////////////////////////////////////////
    // In RC.0, commit() requires the secret share reference
    let (p1_nonces, p1_commitments) = frost::round1::commit(p1_share.signing_share(), &mut rng);
    println!("--- BIP-64MOD Round 1: Nonce Generation ---");
    println!("Participant Identifier: {:?}", p1_id);
    // 4. Handle Results for serialization: only the commitments are public.
    println!("\nPublic Signing Commitments (To be shared):");
    println!("  Hiding: {}", hex::encode(p1_commitments.hiding().serialize()?));
    println!("  Binding: {}", hex::encode(p1_commitments.binding().serialize()?));
    // Keep nonces in memory for the next step (Round 2 signing); they must
    // never be shared or reused.
    let _p1_secret_nonces = p1_nonces;
    println!("\n✅ Nonces generated and tied to Participant 1's share.");
    Ok(())
}
/// Stub entry point used when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    let hint = "Run with --features nostr to enable this example.";
    println!("{}", hint);
}
/// Example: publish a NIP-34 repository announcement twice — once without and
/// once with a `build_manifest_event_id` back-reference.
#[tokio::main]
#[cfg(feature = "nostr")]
#[allow(unused_imports)]
async fn main() {
    use get_file_hash_core::repository_announcement;
    use get_file_hash_core::get_file_hash;
    use nostr_sdk::Keys;
    use sha2::{Digest, Sha256};
    use nostr_sdk::EventId;
    use std::str::FromStr;
    // Ephemeral identity: this demo does not need a persistent key.
    let keys = Keys::generate();
    let relay_urls = get_file_hash_core::get_relay_urls();
    let project_name = "my-awesome-repo-example";
    let description = "A fantastic new project example.";
    // FIX: the clone URL literal was split across a line break, embedding a
    // stray newline at the start of the string; it must be a single-line SSH
    // clone URL.
    let clone_url = "git@github.com:user/my-awesome-repo-example.git";
    // Dummy EventId for examples that require a build_manifest_event_id
    const DUMMY_BUILD_MANIFEST_ID_STR: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    let dummy_build_manifest_id = EventId::from_str(DUMMY_BUILD_MANIFEST_ID_STR).unwrap();
    // Example 1: Without build_manifest_event_id
    println!("Publishing repository announcement without build_manifest_event_id...");
    repository_announcement!(
        &keys,
        &relay_urls,
        project_name,
        description,
        clone_url,
        "../Cargo.toml" // Use a known file in your project
    );
    println!("Repository announcement without build_manifest_event_id published.");
    // Example 2: With build_manifest_event_id
    println!("Publishing repository announcement with build_manifest_event_id...");
    repository_announcement!(
        &keys,
        &relay_urls,
        project_name,
        description,
        clone_url,
        "../Cargo.toml", // Use a known file in your project
        Some(&dummy_build_manifest_id)
    );
    println!("Repository announcement with build_manifest_event_id published.");
}
/// Stub entry point used when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    let msg = "This example requires the 'nostr' feature. Please run with: cargo run --example repository_announcement --features nostr";
    println!("{}", msg);
}
/// Entry point: delegate to the library's FROST mailbox coordinator
/// simulation, propagating any error to the process exit status.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    get_file_hash_core::frost_mailbox_logic::simulate_frost_mailbox_coordinator()?;
    Ok(())
}
/// Stub entry point used when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    let msg = "This example requires the 'nostr' feature. Please run with: cargo run --example frost_mailbox --features nostr";
    println!("{}", msg);
}
# cargo-dist workspace manifest.
[workspace]
# "cargo:." points dist at the Cargo workspace rooted in this directory.
members = ["cargo:."]
# Config for 'dist'
[dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.30.3"
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell", "homebrew", "msi"]
# A GitHub repo to push Homebrew formulas to
tap = "gnostr-org/homebrew-gnostr-org"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
# Path that installers should place binaries in
install-path = "CARGO_HOME"
# Publish jobs to run in CI
publish-jobs = ["homebrew"]
# Whether to install an updater program
install-updater = true
# Skip checking whether the specified configuration files are up to date
allow-dirty = ["ci"]
/// deterministic nostr event build example
// deterministic nostr event build example
use get_file_hash_core::get_file_hash;
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use get_file_hash_core::{get_git_tracked_files, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use nostr_sdk::{EventBuilder, Keys, Tag, SecretKey};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use std::fs;
use std::path::PathBuf;
use sha2::{Digest, Sha256};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use ::hex;
// Build entry point: exports package/git metadata to rustc as env vars; in
// release builds with the `nostr` feature it additionally publishes per-file
// events, a build manifest, and a NIP-34 repository announcement to relays.
#[tokio::main]
async fn main() {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let is_git_repo = std::path::Path::new(&manifest_dir).join(".git").exists();
    // Placeholder binding so the release/nostr block below can reference a
    // branch name even when the git lookup is skipped.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    #[allow(unused_mut)]
    let mut git_branch_str = String::new();
    println!("cargo:rustc-env=CARGO_PKG_NAME={}", env!("CARGO_PKG_NAME"));
    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", env!("CARGO_PKG_VERSION"));
    if is_git_repo {
        // Capture the current commit hash; fall back to "" with a cargo
        // warning if git fails or prints nothing.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Same pattern for the symbolic branch name.
        // NOTE(review): this `let git_branch_str` shadows the outer
        // placeholder only inside this `if` arm; the release/nostr block
        // later reads the outer (always empty) binding — confirm intended.
        let git_branch_output = std::process::Command::new("git")
            .args(&["rev-parse", "--abbrev-ref", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for branch name");
        let git_branch_str = if git_branch_output.status.success() && !git_branch_output.stdout.is_empty() {
            String::from_utf8(git_branch_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git branch command failed or returned empty. Status: {:?}, Stderr: {}",
                git_branch_output.status, String::from_utf8_lossy(&git_branch_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_BRANCH={}", git_branch_str);
    } else {
        // Not a git checkout: export empty values so `env!` still succeeds.
        println!("cargo:rustc-env=GIT_COMMIT_HASH=");
        println!("cargo:rustc-env=GIT_BRANCH=");
    }
    println!("cargo:rerun-if-changed=.git/HEAD");
    //#[cfg(all(not(debug_assertions), feature = "nostr"))]
    //let relay_urls = get_file_hash_core::get_relay_urls();
    // Export content hashes of the key build inputs as env vars.
    let cargo_toml_hash = get_file_hash!("../Cargo.toml");
    println!("cargo:rustc-env=CARGO_TOML_HASH={}", cargo_toml_hash);
    let lib_hash = get_file_hash!("../src/lib.rs");
    println!("cargo:rustc-env=LIB_HASH={}", lib_hash);
    let build_hash = get_file_hash!("../build.rs");
    println!("cargo:rustc-env=BUILD_HASH={}", build_hash);
    println!("cargo:rerun-if-changed=Cargo.toml");
    println!("cargo:rerun-if-changed=src/lib.rs");
    println!("cargo:rerun-if-changed=build.rs");
    let online_relays_csv_path = PathBuf::from(&manifest_dir).join("src/get_file_hash_core/src/online_relays_gps.csv");
    if online_relays_csv_path.exists() {
        println!("cargo:rerun-if-changed={}", online_relays_csv_path.to_str().unwrap());
    }
    // Release-only + nostr-only publishing path.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    if cfg!(not(debug_assertions)) {
        // NOTE(review): the outer cfg already restricts this to release
        // builds, so this cfg! check is redundant (always true here).
        println!("cargo:warning=Nostr feature enabled: Build may take longer due to network operations (publishing events to relays).");
        // This code only runs in release builds
        let package_version = std::env::var("CARGO_PKG_VERSION").unwrap();
        let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
        if let Err(e) = fs::create_dir_all(&output_dir) {
            println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
        }
        let files_to_publish: Vec<String> = get_git_tracked_files(&PathBuf::from(&manifest_dir));
        // Re-query the commit hash (the earlier value is scoped to the
        // `if is_git_repo` arm above).
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Create padded_commit_hash: left-pad the 40-hex commit hash with
        // zeros to 64 hex chars so it parses as a 32-byte secret key.
        let padded_commit_hash = format!("{:0>64}", &git_commit_hash_str);
        println!("cargo:rustc-env=PADDED_COMMIT_HASH={}", padded_commit_hash);
        // Initialize client and keys once
        let initial_secret_key = SecretKey::parse(&padded_commit_hash).expect("Failed to create Nostr SecretKey from PADDED_COMMIT_HASH");
        let initial_keys = Keys::new(initial_secret_key);
        let mut client = nostr_sdk::Client::new(initial_keys.clone());
        let mut relay_urls = get_file_hash_core::get_relay_urls();
        // Add relays to the client; failures are warnings, not fatal.
        for relay_url in relay_urls.iter() {
            if let Err(e) = client.add_relay(relay_url).await {
                println!("cargo:warning=Failed to add relay {}: {}", relay_url, e);
            }
        }
        client.connect().await;
        println!("cargo:warning=Added and connected to {} relays.", relay_urls.len());
        let mut published_event_ids: Vec<Tag> = Vec::new();
        let mut total_bytes_sent: usize = 0;
        // Publish one event per tracked file, keyed by the file's SHA-256
        // (the hash doubles as the event's secret key — deterministic ids).
        for file_path_str in &files_to_publish {
            println!("cargo:warning=Processing file: {}", file_path_str);
            match fs::read(file_path_str) {
                Ok(bytes) => {
                    let mut hasher = Sha256::new();
                    hasher.update(&bytes);
                    let result = hasher.finalize();
                    let file_hash_hex = hex::encode(result);
                    match SecretKey::from_hex(&file_hash_hex.clone()) {
                        Ok(secret_key) => {
                            let keys = Keys::new(secret_key);
                            let content = String::from_utf8_lossy(&bytes).into_owned();
                            let tags = vec![
                                Tag::parse(["file", file_path_str].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                                Tag::parse(["version", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                            ];
                            let event_builder = EventBuilder::text_note(content).tags(tags);
                            if let Some(event_id) = publish_nostr_event_if_release(&mut client, file_hash_hex, keys.clone(), event_builder, &mut relay_urls, file_path_str, &output_dir, &mut total_bytes_sent).await {
                                published_event_ids.push(Tag::event(event_id));
                            }
                            // Publish metadata event
                            get_file_hash_core::publish_metadata_event(
                                &keys,
                                &relay_urls,
                                DEFAULT_PICTURE_URL,
                                DEFAULT_BANNER_URL,
                                file_path_str,
                            ).await;
                        }
                        Err(e) => {
                            println!("cargo:warning=Failed to derive Nostr secret key for {}: {}", file_path_str, e);
                        }
                    }
                }
                Err(e) => {
                    println!("cargo:warning=Failed to read file {}: {}", file_path_str, e);
                }
            }
        }
        // Create and publish the build_manifest
        if !published_event_ids.is_empty() {
            //TODO this will be either the default or detected from env vars PRIVATE_KEY
            let keys = Keys::new(SecretKey::from_hex(DEFAULT_GNOSTR_KEY).expect("Failed to create Nostr keys from DEFAULT_GNOSTR_KEY"));
            let cloned_keys = keys.clone();
            let content = format!("Build manifest for get_file_hash v{}", package_version);
            // NOTE(review): four identical "build_manifest" tags — looks like
            // copy-paste duplication; confirm relays actually need repeats.
            let mut tags = vec![
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
            ];
            tags.extend(published_event_ids);
            let event_builder = EventBuilder::text_note(content.clone()).tags(tags);
            if let Some(event_id) = publish_nostr_event_if_release(
                &mut client,
                hex::encode(Sha256::digest(content.as_bytes())),
                keys,
                event_builder,
                &mut relay_urls,
                "build_manifest.json",
                &output_dir,
                &mut total_bytes_sent,
            ).await {
                // NOTE(review): wrapping in Some() then unwrap()ing below is
                // redundant — `event_id` is already in scope here.
                let build_manifest_event_id = Some(event_id);
                // Publish metadata event for the build manifest
                get_file_hash_core::publish_metadata_event(
                    &cloned_keys, // Use reference to cloned keys here
                    &relay_urls,
                    DEFAULT_PICTURE_URL,
                    DEFAULT_BANNER_URL,
                    &format!("build_manifest:{}", package_version),
                ).await;
                let git_commit_hash = &git_commit_hash_str;
                let git_branch = &git_branch_str;
                let repo_url = std::env::var("CARGO_PKG_REPOSITORY").unwrap();
                let repo_name = std::env::var("CARGO_PKG_NAME").unwrap();
                let repo_description = std::env::var("CARGO_PKG_DESCRIPTION").unwrap();
                // Shadowed output_dir recomputed with the same value; the
                // directory already exists from the earlier create_dir_all.
                let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
                if let Err(e) = fs::create_dir_all(&output_dir) {
                    println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
                }
                // The announcement identity is derived from the manifest's
                // event id (a 32-byte hex string) used as a secret key.
                let announcement_keys = Keys::new(SecretKey::from_hex(build_manifest_event_id.unwrap().to_hex().as_str()).expect("Failed to create Nostr keys from build_manifest_event_id"));
                let announcement_pubkey_hex = announcement_keys.public_key().to_string();
                // Publish NIP-34 Repository Announcement
                if let Some(_event_id) = get_repo_announcement_event(
                    &mut client,
                    &announcement_keys,
                    &relay_urls,
                    &repo_url,
                    &repo_name,
                    &repo_description,
                    &git_commit_hash,
                    &git_branch,
                    &output_dir,
                    &announcement_pubkey_hex
                ).await {
                    // Successfully published announcement
                }
            }
        }
        println!("cargo:warning=Total bytes sent to Nostr relays: {} bytes ({} MB)", total_bytes_sent, total_bytes_sent as f64 / 1024.0 / 1024.0);
    }
}
// deterministic nostr event build example
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost; // MUST use the -tr variant for BIP-340/Nostr
#[cfg(feature = "nostr")]
use rand::thread_rng;
#[cfg(feature = "nostr")]
use serde_json::json;
#[cfg(feature = "nostr")]
use sha2::{Digest, Sha256};
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
#[cfg(feature = "nostr")]
use hex;
/// Demo: produce a BIP-340 (Schnorr) threshold signature over a Nostr event
/// id using FROST with a 2-of-3 ROAST-style coordinator simulation.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut rng = thread_rng();
    let (max_signers, min_signers) = (3, 2);
    // 1. Setup Nostr Event Metadata
    let pubkey_hex = "79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"; // Example
    let created_at = 1712050000;
    let kind = 1;
    let content = "Hello from ROAST threshold signatures!";
    // 2. Serialize for Nostr ID (per NIP-01)
    // Preimage array: [0, pubkey, created_at, kind, tags, content].
    let event_json = json!([
        0,
        pubkey_hex,
        created_at,
        kind,
        [],
        content
    ]).to_string();
    let mut hasher = Sha256::new();
    hasher.update(event_json.as_bytes());
    let event_id = hasher.finalize(); // This 32-byte hash is our signing message
    // 3. FROST/ROAST Key Generation (trusted dealer splits the group key)
    let (shares, pubkey_package) = frost::keys::generate_with_dealer(
        max_signers,
        min_signers,
        frost::keys::IdentifierList::Default,
        &mut rng,
    )?;
    // 4. ROAST Coordination Simulation (Round 1: Commitments)
    // In ROAST, the coordinator keeps a "session" open and collects commitments
    let mut session_commitments = BTreeMap::new();
    let mut signer_nonces = BTreeMap::new();
    // Signers 1 and 3 respond first (Signer 2 is offline/slow)
    for &id_val in &[1, 3] {
        let id = frost::Identifier::try_from(id_val as u16)?;
        let (nonces, comms) = frost::round1::commit(shares[&id].signing_share(), &mut rng);
        session_commitments.insert(id, comms);
        signer_nonces.insert(id, nonces);
    }
    // 5. Round 2: Signing the Nostr ID
    let signing_package = frost::SigningPackage::new(session_commitments, &event_id);
    let mut signature_shares = BTreeMap::new();
    for (id, nonces) in signer_nonces {
        // Each signer turns its dealer share into a KeyPackage before signing.
        let key_package: frost::keys::KeyPackage = shares[&id].clone().try_into()?;
        let share = frost::round2::sign(&signing_package, &nonces, &key_package)?;
        signature_shares.insert(id, share);
    }
    // 6. Aggregate into a BIP-340 Signature
    let group_signature = frost::aggregate(
        &signing_package,
        &signature_shares,
        &pubkey_package,
    )?;
    // 7. Verification (using BIP-340 logic)
    pubkey_package.verifying_key().verify(&event_id, &group_signature)?;
    println!("Nostr Event ID: {}", hex::encode(event_id));
    println!("Threshold Signature (BIP-340): {}", hex::encode(group_signature.serialize()?));
    println!("Successfully signed Nostr event using ROAST/FROST!");
    Ok(())
}
/// Stub entry point used when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    let msg = "This example requires the 'nostr' feature. Please run with: cargo run --example frost_bip_340 --features nostr";
    println!("{}", msg);
}
/// Example: publish a NIP-30617-style repository state event for a demo repo.
#[tokio::main]
#[cfg(feature = "nostr")]
async fn main() {
    use get_file_hash_core::publish_repository_state;
    use nostr_sdk::Keys;
    // Fresh throwaway identity for this run.
    let keys = Keys::generate();
    let relay_urls = get_file_hash_core::get_relay_urls();
    // Replaceable-event coordinates: d-tag plus the branch and its tip commit.
    let repo_d_tag = "my-awesome-repo-example";
    let branch = "main";
    let tip_commit = "a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0";
    println!("Publishing repository state...");
    publish_repository_state!(&keys, &relay_urls, repo_d_tag, branch, tip_commit);
    println!("Repository state published.");
}
/// Stub entry point used when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    let msg = "This example requires the 'nostr' feature. Please run with: cargo run --example publish_repository_state --features nostr";
    println!("{}", msg);
}
/// deterministic nostr event build example
// deterministic nostr event build example
use get_file_hash_core::get_file_hash;
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use get_file_hash_core::{get_git_tracked_files, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use nostr_sdk::{EventBuilder, Keys, Tag, SecretKey};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use std::fs;
use std::path::PathBuf;
use sha2::{Digest, Sha256};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use ::hex;
/// Generate gRPC server, client, and transport code from the relay's proto
/// definitions. Requires `protoc` on PATH.
#[cfg(feature = "gen-protos")]
fn compile_protos() {
    let builder = tonic_prost_build::configure()
        .build_client(true)
        .build_server(true)
        .build_transport(true)
        .protoc_arg("--experimental_allow_proto3_optional");
    builder
        .compile_protos(&["n34-relay/proto/plugins.proto"], &["n34-relay/proto"])
        .expect("protoc is required");
}
/// No-op fallback so `compile_protos()` can be called unconditionally.
#[cfg(not(feature = "gen-protos"))]
fn compile_protos() {}
// Build entry point (n34-relay variant): optionally compiles protobufs, then
// exports package/git metadata to rustc as env vars; in release builds with
// the `nostr` feature it also publishes per-file events, a build manifest,
// and a NIP-34 repository announcement to relays.
#[tokio::main]
async fn main() {
    compile_protos();
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let is_git_repo = std::path::Path::new(&manifest_dir).join(".git").exists();
    // Placeholder binding so the release/nostr block below can reference a
    // branch name even when the git lookup is skipped.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    #[allow(unused_mut)]
    let mut git_branch_str = String::new();
    println!("cargo:rustc-env=CARGO_PKG_NAME={}", env!("CARGO_PKG_NAME"));
    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", env!("CARGO_PKG_VERSION"));
    if is_git_repo {
        // Capture the current commit hash; fall back to "" with a cargo
        // warning if git fails or prints nothing.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Same pattern for the symbolic branch name.
        // NOTE(review): this `let git_branch_str` shadows the outer
        // placeholder only inside this `if` arm; the release/nostr block
        // later reads the outer (always empty) binding — confirm intended.
        let git_branch_output = std::process::Command::new("git")
            .args(&["rev-parse", "--abbrev-ref", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for branch name");
        let git_branch_str = if git_branch_output.status.success() && !git_branch_output.stdout.is_empty() {
            String::from_utf8(git_branch_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git branch command failed or returned empty. Status: {:?}, Stderr: {}",
                git_branch_output.status, String::from_utf8_lossy(&git_branch_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_BRANCH={}", git_branch_str);
    } else {
        // Not a git checkout: export empty values so `env!` still succeeds.
        println!("cargo:rustc-env=GIT_COMMIT_HASH=");
        println!("cargo:rustc-env=GIT_BRANCH=");
    }
    println!("cargo:rerun-if-changed=.git/HEAD");
    //#[cfg(all(not(debug_assertions), feature = "nostr"))]
    //let relay_urls = get_file_hash_core::get_relay_urls();
    // Export content hashes of the key build inputs as env vars.
    let cargo_toml_hash = get_file_hash!("Cargo.toml");
    println!("cargo:rustc-env=CARGO_TOML_HASH={}", cargo_toml_hash);
    let lib_hash = get_file_hash!("src/lib.rs");
    println!("cargo:rustc-env=LIB_HASH={}", lib_hash);
    let build_hash = get_file_hash!("build.rs");
    println!("cargo:rustc-env=BUILD_HASH={}", build_hash);
    println!("cargo:rerun-if-changed=Cargo.toml");
    println!("cargo:rerun-if-changed=src/lib.rs");
    println!("cargo:rerun-if-changed=build.rs");
    let online_relays_csv_path = PathBuf::from(&manifest_dir).join("src/get_file_hash_core/src/online_relays_gps.csv");
    if online_relays_csv_path.exists() {
        println!("cargo:rerun-if-changed={}", online_relays_csv_path.to_str().unwrap());
    }
    // Release-only + nostr-only publishing path.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    if cfg!(not(debug_assertions)) {
        // NOTE(review): the outer cfg already restricts this to release
        // builds, so this cfg! check is redundant (always true here).
        println!("cargo:warning=Nostr feature enabled: Build may take longer due to network operations (publishing events to relays).");
        // This code only runs in release builds
        let package_version = std::env::var("CARGO_PKG_VERSION").unwrap();
        let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
        if let Err(e) = fs::create_dir_all(&output_dir) {
            println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
        }
        let files_to_publish: Vec<String> = get_git_tracked_files(&PathBuf::from(&manifest_dir));
        // Re-query the commit hash (the earlier value is scoped to the
        // `if is_git_repo` arm above).
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Create padded_commit_hash: left-pad the 40-hex commit hash with
        // zeros to 64 hex chars so it parses as a 32-byte secret key.
        let padded_commit_hash = format!("{:0>64}", &git_commit_hash_str);
        println!("cargo:rustc-env=PADDED_COMMIT_HASH={}", padded_commit_hash);
        // Initialize client and keys once
        let initial_secret_key = SecretKey::parse(&padded_commit_hash).expect("Failed to create Nostr SecretKey from PADDED_COMMIT_HASH");
        let initial_keys = Keys::new(initial_secret_key);
        let mut client = nostr_sdk::Client::new(initial_keys.clone());
        let mut relay_urls = get_file_hash_core::get_relay_urls();
        // Add relays to the client; failures are warnings, not fatal.
        for relay_url in relay_urls.iter() {
            if let Err(e) = client.add_relay(relay_url).await {
                println!("cargo:warning=Failed to add relay {}: {}", relay_url, e);
            }
        }
        client.connect().await;
        println!("cargo:warning=Added and connected to {} relays.", relay_urls.len());
        let mut published_event_ids: Vec<Tag> = Vec::new();
        let mut total_bytes_sent: usize = 0;
        // Publish one event per tracked file, keyed by the file's SHA-256
        // (the hash doubles as the event's secret key — deterministic ids).
        for file_path_str in &files_to_publish {
            println!("cargo:warning=Processing file: {}", file_path_str);
            match fs::read(file_path_str) {
                Ok(bytes) => {
                    let mut hasher = Sha256::new();
                    hasher.update(&bytes);
                    let result = hasher.finalize();
                    let file_hash_hex = hex::encode(result);
                    match SecretKey::from_hex(&file_hash_hex.clone()) {
                        Ok(secret_key) => {
                            let keys = Keys::new(secret_key);
                            let content = String::from_utf8_lossy(&bytes).into_owned();
                            let tags = vec![
                                Tag::parse(["file", file_path_str].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                                Tag::parse(["version", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                            ];
                            let event_builder = EventBuilder::text_note(content).tags(tags);
                            if let Some(event_id) = publish_nostr_event_if_release(&mut client, file_hash_hex, keys.clone(), event_builder, &mut relay_urls, file_path_str, &output_dir, &mut total_bytes_sent).await {
                                published_event_ids.push(Tag::event(event_id));
                            }
                            // Publish metadata event
                            get_file_hash_core::publish_metadata_event(
                                &keys,
                                &relay_urls,
                                DEFAULT_PICTURE_URL,
                                DEFAULT_BANNER_URL,
                                file_path_str,
                            ).await;
                        }
                        Err(e) => {
                            println!("cargo:warning=Failed to derive Nostr secret key for {}: {}", file_path_str, e);
                        }
                    }
                }
                Err(e) => {
                    println!("cargo:warning=Failed to read file {}: {}", file_path_str, e);
                }
            }
        }
        // Create and publish the build_manifest
        if !published_event_ids.is_empty() {
            //TODO this will be either the default or detected from env vars PRIVATE_KEY
            let keys = Keys::new(SecretKey::from_hex(DEFAULT_GNOSTR_KEY).expect("Failed to create Nostr keys from DEFAULT_GNOSTR_KEY"));
            let cloned_keys = keys.clone();
            let content = format!("Build manifest for get_file_hash v{}", package_version);
            // NOTE(review): four identical "build_manifest" tags — looks like
            // copy-paste duplication; confirm relays actually need repeats.
            let mut tags = vec![
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
            ];
            tags.extend(published_event_ids);
            let event_builder = EventBuilder::text_note(content.clone()).tags(tags);
            if let Some(event_id) = publish_nostr_event_if_release(
                &mut client,
                hex::encode(Sha256::digest(content.as_bytes())),
                keys,
                event_builder,
                &mut relay_urls,
                "build_manifest.json",
                &output_dir,
                &mut total_bytes_sent,
            ).await {
                // NOTE(review): wrapping in Some() then unwrap()ing below is
                // redundant — `event_id` is already in scope here.
                let build_manifest_event_id = Some(event_id);
                // Publish metadata event for the build manifest
                get_file_hash_core::publish_metadata_event(
                    &cloned_keys, // Use reference to cloned keys here
                    &relay_urls,
                    DEFAULT_PICTURE_URL,
                    DEFAULT_BANNER_URL,
                    &format!("build_manifest:{}", package_version),
                ).await;
                let git_commit_hash = &git_commit_hash_str;
                let git_branch = &git_branch_str;
                let repo_url = std::env::var("CARGO_PKG_REPOSITORY").unwrap();
                let repo_name = std::env::var("CARGO_PKG_NAME").unwrap();
                let repo_description = std::env::var("CARGO_PKG_DESCRIPTION").unwrap();
                // Shadowed output_dir recomputed with the same value; the
                // directory already exists from the earlier create_dir_all.
                let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
                if let Err(e) = fs::create_dir_all(&output_dir) {
                    println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
                }
                // The announcement identity is derived from the manifest's
                // event id (a 32-byte hex string) used as a secret key.
                let announcement_keys = Keys::new(SecretKey::from_hex(build_manifest_event_id.unwrap().to_hex().as_str()).expect("Failed to create Nostr keys from build_manifest_event_id"));
                let announcement_pubkey_hex = announcement_keys.public_key().to_string();
                // Publish NIP-34 Repository Announcement
                if let Some(_event_id) = get_repo_announcement_event(
                    &mut client,
                    &announcement_keys,
                    &relay_urls,
                    &repo_url,
                    &repo_name,
                    &repo_description,
                    &git_commit_hash,
                    &git_branch,
                    &output_dir,
                    &announcement_pubkey_hex
                ).await {
                    // Successfully published announcement
                }
            }
        }
        println!("cargo:warning=Total bytes sent to Nostr relays: {} bytes ({} MB)", total_bytes_sent, total_bytes_sent as f64 / 1024.0 / 1024.0);
    }
}
// deterministic nostr event build example
/// BIP-64MOD + GCC: Complete Git Empty & Genesis Constants
///
/// This module provides the standard cryptographic identifiers for "null",
/// "empty", and "genesis" states, including NIP-19 (Bech32) identities.
pub struct GitEmptyState;

impl GitEmptyState {
    // === NULL REFERENCE (Zero Hash) ===
    // All-zero 64-hex sentinel for "no object".
    pub const NULL_SHA256: &'static str = "0000000000000000000000000000000000000000000000000000000000000000";
    // === EMPTY BLOB (Empty File) ===
    // Git object ids of a zero-length blob, plus NIP-19 identities derived
    // from them.
    pub const BLOB_SHA1: &'static str = "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391";
    pub const BLOB_SHA256: &'static str = "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813";
    pub const BLOB_NSEC: &'static str = "nsec1guaq7npmaz5ndqdzvl3mr6d8mndprp2rdls5ram5jys2xqmjrqfsdzhrp6";
    pub const BLOB_NPUB: &'static str = "npub180cvv07tjdrghvkyh6964p7w9vsqpf3p05868v399v86p8y6f69sq5fdp0";
    // === EMPTY TREE (Empty Directory) ===
    // Git object ids of the empty tree, plus NIP-19 identities.
    pub const TREE_SHA1: &'static str = "4b825dc642cb6eb9a060e54bf8d69288fbee4904";
    pub const TREE_SHA256: &'static str = "6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321";
    pub const TREE_NSEC: &'static str = "nsec1dmceksfzt3fknuwpqn29mrv9a75mq4a48v2tfwde88whfhkv2vsslsc46c";
    // NOTE(review): TREE_NPUB is byte-identical to GENESIS_NPUB below, yet
    // TREE_NSEC and GENESIS_NSEC differ — one of the npubs looks wrong;
    // verify by re-deriving from the nsecs.
    pub const TREE_NPUB: &'static str = "npub1pxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq6erdfh";
    // === GENESIS COMMIT (DeepSpaceM1 @ Epoch 0) ===
    /// Result of: git commit --allow-empty -m 'Initial commit'
    /// With Author/Committer: DeepSpaceM1 <GENESIS_AUTHOR_EMAIL> @ 1970-01-01T00:00:00Z
    pub const GENESIS_AUTHOR_NAME: &'static str = "DeepSpaceM1";
    // NOTE(review): this literal spans a line break, so its value begins with
    // an embedded newline — almost certainly an extraction/redaction artifact.
    // Confirm the intended single-line address; GENESIS_COMMIT_SHA256 below
    // depends on the exact author/committer line.
    pub const GENESIS_AUTHOR_EMAIL: &'static str = "
[email protected]";
    pub const GENESIS_DATE_UNIX: i64 = 0;
    pub const GENESIS_MESSAGE: &'static str = "Initial commit";
    /// The resulting SHA-256 Commit Hash for this specific configuration
    pub const GENESIS_COMMIT_SHA256: &'static str = "e9768652d87e07663479a0ad402513f56d953930b659c2ef389d4d03d3623910";
    /// The NIP-19 Identity associated with the Genesis Commit
    pub const GENESIS_NSEC: &'static str = "nsec1jpxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq68at9d";
    pub const GENESIS_NPUB: &'static str = "npub1pxmpep6yk7z6p332u9588k0vscg26rv29pynvscg26rv29pynvsq6erdfh";
}
/// Helper for constructing the commit object string for hashing
pub mod builders {
    use super::GitEmptyState;

    /// Render the genesis commit as a raw Git commit-object body
    /// (tree line, author/committer lines at epoch 0 UTC, blank line, message).
    pub fn build_genesis_commit_object() -> String {
        format!(
            "tree {tree}\nauthor {name} <{email}> {ts} +0000\ncommitter {name} <{email}> {ts} +0000\n\n{msg}\n",
            tree = GitEmptyState::TREE_SHA256,
            name = GitEmptyState::GENESIS_AUTHOR_NAME,
            email = GitEmptyState::GENESIS_AUTHOR_EMAIL,
            ts = GitEmptyState::GENESIS_DATE_UNIX,
            msg = GitEmptyState::GENESIS_MESSAGE,
        )
    }
}
/// Print the canonical genesis constants, then the reconstructed raw commit
/// object they describe.
fn main() {
    let header = "--- BIP-64MOD + GCC Genesis State ---";
    println!("{}", header);
    println!("Commit Hash: {}", GitEmptyState::GENESIS_COMMIT_SHA256);
    println!(
        "Author: {} <{}>",
        GitEmptyState::GENESIS_AUTHOR_NAME,
        GitEmptyState::GENESIS_AUTHOR_EMAIL
    );
    println!("Timestamp: {}", GitEmptyState::GENESIS_DATE_UNIX);
    println!("NSEC: {}", GitEmptyState::GENESIS_NSEC);
    let object_raw = builders::build_genesis_commit_object();
    println!("\nRaw Git Commit Object:\n---\n{}---", object_raw);
}
name: Rust
on:
push:
branches: [ "*" ]
pull_request:
branches: [ "*" ]
env:
CARGO_TERM_COLOR: always
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
RUST_LOG: info
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-15-intel, macos-latest, windows-latest]
features_args: ["", "--no-default-features", "--features nostr"]
steps:
- uses: actions/checkout@v4
- name: Install system deps (dbus)
if: runner.os == 'Linux'
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev
- name: Install protobuf (protoc)
shell: bash
run: |
set -euxo pipefail
if [[ "${{ runner.os }}" == "macOS" ]]; then
brew update
brew install protobuf
elif [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y protobuf-compiler
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install -y protoc
fi
- name: Build ${{ matrix.features_args }}
run: cargo build --workspace --verbose ${{ matrix.features_args }}
- name: Run workspace tests ${{ matrix.features_args }}
run: |
cargo test --workspace ${{ matrix.features_args }} -- --test-threads 1
- name: Run get_file_hash_core tests ${{ matrix.features_args }}
shell: bash
run: |
if [[ "${{ matrix.features_args }}" == "--features nostr" ]]; then
cargo test -p get_file_hash_core ${{ matrix.features_args }} -- --test-threads 1 --nocapture
else
cargo test -p get_file_hash_core ${{ matrix.features_args }} -- --test-threads 1
fi
- name: Run get_file_hash tests ${{ matrix.features_args }}
shell: bash
run: |
if [[ "${{ matrix.features_args }}" == "--features nostr" ]]; then
cargo test -p get_file_hash ${{ matrix.features_args }} -- --test-threads 1 --nocapture
else
cargo test -p get_file_hash ${{ matrix.features_args }} -- --test-threads 1
fi
- name: Build Release ${{ matrix.features_args }}
run: cargo build --workspace --release ${{ matrix.features_args }}
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand_chacha::rand_core::SeedableRng;
#[cfg(feature = "nostr")]
use hex;
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fixed seed (the SHA-256 of the empty git blob) so that the dealer
    // output is fully reproducible across runs.
    let seed_hex = "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813";
    let seed: [u8; 32] = hex::decode(seed_hex)?
        .try_into()
        .map_err(|_| "Invalid seed length")?;
    let mut rng = ChaCha20Rng::from_seed(seed);

    let max_signers = 3;
    let min_signers = 2;

    ////////////////////////////////////////////////////////////////////////////
    // Round 0: Key Generation (Trusted Dealer)
    ////////////////////////////////////////////////////////////////////////////
    // IdentifierList::Default assigns participant identifiers 1, 2, 3, ...
    let (shares, pubkey_package) = frost::keys::generate_with_dealer(
        max_signers,
        min_signers,
        IdentifierList::Default,
        &mut rng,
    )?;

    println!("--- Deterministic FROST Dealer ---");
    println!("Threshold: {} of {}", min_signers, max_signers);
    println!("Number of shares generated: {}", shares.len());
    println!("\n--- Verifying Shares Against Commitments ---");

    // Because the RNG seed is pinned to EMPTY_BLOB_SHA256, the secret signing
    // shares (the private scalars f(x)) are identical on every run:
    //
    //   Participant 1, ...0001, 757f49553754988450d995c65a0459a0f5a703d7c585f95f468202d09a365f57
    //   Participant 2, ...0002, a3c4835e32308cb11b43968962290bc9171f1f1ca90c21741890e4f326f9879b
    //   Participant 3, ...0003, d209bd672d0c80dd65ad974c6a4dc1f138973a618c924988eaaa0715b3bcafdf
    for (identifier, share) in &shares {
        // `verify` checks the share against the dealer's VSS commitment.
        if let Err(e) = share.verify() {
            println!("Participant {:?}: INVALID! ❌ Error: {:?}", identifier, e);
        } else {
            println!("Participant {:?}: Valid ✅", identifier);
        }
    }

    let pubkey_bytes = pubkey_package.verifying_key().serialize()?;
    println!("Group Public Key (Hex Compressed): {}", hex::encode(&pubkey_bytes));
    // Drop the leading parity byte to get the x-only (BIP-340 style) form.
    let x_only_hex = hex::encode(&pubkey_bytes[1..]);
    println!("Group Public Key (Hex X-Only): {}", x_only_hex);
    Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point for builds without the `nostr` feature.
    let hint = "Run with --features nostr to enable this example.";
    println!("{}", hint);
}
/// NIP-34 `publish_pull_request!` example (Kind 1618).
///
/// Demonstrates all four macro arities: with/without an optional title and
/// with/without an optional build-manifest event id.
///
/// Fix: `#[cfg(feature = "nostr")]` must come BEFORE `#[tokio::main]` — the
/// attribute macro listed first is expanded before the item is cfg-stripped,
/// so with the feature (and the optional tokio dependency) disabled the
/// build would fail on the unresolved `tokio::main` attribute.
#[cfg(feature = "nostr")]
#[tokio::main]
async fn main() {
    use get_file_hash_core::publish_pull_request;
    use nostr_sdk::Keys;
    use nostr_sdk::EventId;
    use std::str::FromStr;

    // Throwaway keypair and the crate's default relay set.
    let keys = Keys::generate();
    let relay_urls = get_file_hash_core::get_relay_urls();

    // NIP-34 addressing data for the example repository.
    let d_tag = "my-awesome-repo-example";
    let commit_id = "0123456789abcdef0123456789abcdef01234567";
    // Fix: the clone URL literal was split by a stray embedded newline.
    let clone_url = "[email protected]:user/my-feature-branch.git";
    let title = Some("Feat: Add new awesome feature example");

    // Dummy EventId for examples that require a build_manifest_event_id
    const DUMMY_BUILD_MANIFEST_ID_STR: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    let dummy_build_manifest_id = EventId::from_str(DUMMY_BUILD_MANIFEST_ID_STR).unwrap();

    // Example 1: Without title and build_manifest_event_id
    println!("Publishing pull request without title and build_manifest_event_id...");
    publish_pull_request!(
        &keys,
        &relay_urls,
        d_tag,
        commit_id,
        clone_url
    );
    println!("Pull request without title and build_manifest_event_id published.");

    // Example 2: With title but without build_manifest_event_id
    println!("Publishing pull request with title but without build_manifest_event_id...");
    publish_pull_request!(
        &keys,
        &relay_urls,
        d_tag,
        commit_id,
        clone_url,
        title
    );
    println!("Pull request with title but without build_manifest_event_id published.");

    // Example 3: With build_manifest_event_id but without title
    println!("Publishing pull request with build_manifest_event_id but without title...");
    publish_pull_request!(
        &keys,
        &relay_urls,
        d_tag,
        commit_id,
        clone_url,
        None, // Explicitly pass None for title
        Some(&dummy_build_manifest_id)
    );
    println!("Pull request with build_manifest_event_id but without title published.");

    // Example 4: With title and build_manifest_event_id
    println!("Publishing pull request with title and build_manifest_event_id...");
    publish_pull_request!(
        &keys,
        &relay_urls,
        d_tag,
        commit_id,
        clone_url,
        title,
        Some(&dummy_build_manifest_id)
    );
    println!("Pull request with title and build_manifest_event_id published.");
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point for builds without the `nostr` feature.
    let hint = "This example requires the 'nostr' feature. Please run with: cargo run --example publish_pull_request --features nostr";
    println!("{}", hint);
}
# `get_file_hash` macro
This project provides a Rust procedural macro, `get_file_hash!`, designed to compute the SHA-256 hash of a specified file at compile time. This hash is then embedded directly into your compiled executable. This feature is invaluable for:
* **Integrity Verification:** Ensuring the deployed code hasn't been tampered with.
* **Versioning:** Embedding a unique identifier linked to the exact source code version.
* **Cache Busting:** Generating unique names for assets based on their content.
## Project Structure
* `get_file_hash_core`: A foundational crate containing the `get_file_hash!` macro definition.
* `get_file_hash`: The main library crate that re-exports the macro.
* `src/bin/get_file_hash.rs`: An example executable demonstrating the macro's usage by hashing its own source file and updating this `README.md`.
* `build.rs`: A build script that also utilizes the `get_file_hash!` macro to hash `Cargo.toml` during the build process.
## Usage of `get_file_hash!` Macro
To use the `get_file_hash!` macro, ensure you have `get_file_hash` (or `get_file_hash_core` for direct usage) as a dependency in your `Cargo.toml`.
### Example
```rust
use get_file_hash::get_file_hash;
use get_file_hash::CARGO_TOML_HASH;
use sha2::{Digest, Sha256};
fn main() {
// The macro resolves the path relative to CARGO_MANIFEST_DIR
let readme_hash = get_file_hash!("src/bin/readme.rs");
let lib_hash = get_file_hash!("src/lib.rs");
println!("The SHA-256 hash of src/lib.rs is: {}", lib_hash);
println!("The SHA-256 hash of src/bin/readme.rs is: {}", readme_hash);
println!("The SHA-256 hash of Cargo.toml is: {}", CARGO_TOML_HASH);
}
```
## Release
## [`README.md`](./README.md)
```bash
cargo run --bin readme > README.md
```
## [`src/bin/readme.rs`](src/bin/readme.rs)
* **Target File:** `src/bin/readme.rs`
## NIP-34 Integration: Git Repository Events on Nostr
This library provides a set of powerful macros and functions for integrating Git repository events with the Nostr protocol, adhering to the [NIP-34: Git Repositories on Nostr](https://github.com/nostr-protocol/nips/blob/master/34.md) specification.
These tools allow you to publish various Git-related events to Nostr relays, enabling decentralized tracking and collaboration for your code repositories.
### Available NIP-34 Macros
Each macro provides a convenient way to publish specific NIP-34 event kinds:
* [`repository_announcement!`](#repository_announcement)
* Publishes a `Repository Announcement` event (Kind 30617) to announce a new or updated Git repository.
* [`publish_patch!`](#publish_patch)
* Publishes a `Patch` event (Kind 1617) containing a Git patch (diff) for a specific commit.
* [`publish_pull_request!`](#publish_pull_request)
* Publishes a `Pull Request` event (Kind 1618) to propose changes and facilitate code review.
* [`publish_pr_update!`](#publish_pr_update)
* Publishes a `Pull Request Update` event (Kind 1619) to update an existing pull request.
* [`publish_repository_state!`](#publish_repository_state)
* Publishes a `Repository State` event (Kind 1620) to announce the current state of a branch (e.g., its latest commit).
* [`publish_issue!`](#publish_issue)
* Publishes an `Issue` event (Kind 1621) to report bugs, request features, or track tasks.
### Running NIP-34 Examples
To see these macros in action, navigate to the `examples/` directory and run each example individually with the `nostr` feature enabled:
```bash
cargo run --example repository_announcement --features nostr
cargo run --example publish_patch --features nostr
cargo run --example publish_pull_request --features nostr
cargo run --example publish_pr_update --features nostr
cargo run --example publish_repository_state --features nostr
cargo run --example publish_issue --features nostr
```
* **SHA-256 Hash:** 6c6325c5a4c14f44cbda6ca53179ab3d6666ce7c916365668c6dd1d79215db59
* **Status:** Integrity Verified.
##
## [`build.rs`](build.rs)
* **Target File:** `build.rs`
* **SHA-256 Hash:** 20c958c8cbb5c77cf5eb3763b6da149b61241d328df52d39b7aa97903305c889
* **Status:** Integrity Verified.
##
## [`Cargo.toml`](Cargo.toml)
* **Target File:** `Cargo.toml`
* **SHA-256 Hash:** e3f392bf23b5fb40902acd313a8c76d1943060b6805ea8615de62f9baf0c6513
* **Status:** Integrity Verified.
##
## [`src/lib.rs`](src/lib.rs)
* **Target File:** `src/lib.rs`
* **SHA-256 Hash:** 591593482a6c9aac8793aa1e488e613f52a4effb1ec3465fd9d6a54537f2b123
* **Status:** Integrity Verified.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Delegate to the library's FROST mailbox simulation and propagate errors.
    get_file_hash_core::frost_mailbox_logic::simulate_frost_mailbox_post_signer()?;
    Ok(())
}
#[cfg(not(feature = "nostr"))]
fn main() {
    // Stub entry point for builds without the `nostr` feature.
    let hint = "This example requires the 'nostr' feature. Please run with: cargo run --example frost_mailbox_post --features nostr";
    println!("{}", hint);
}
# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.
name: Release
permissions:
"contents": "write"
# This task will run whenever you push a git tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
pull_request:
push:
tags:
- '**[0-9]+.[0-9]+.[0-9]+*'
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
install-deps:
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
publishing: ${{ !github.event.pull_request }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
plan:
needs:
- install-deps
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
publishing: ${{ !github.event.pull_request }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.3/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
# Build and packages all the platform-specific things
build-local-artifacts:
name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
# Let the initial task tell us to not run (currently very blunt)
needs:
- plan
- install-deps
if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
strategy:
fail-fast: false
# Target platforms/runners are computed by dist in create-release.
# Each member of the matrix has the following arguments:
#
# - runner: the github runner
# - dist-args: cli flags to pass to dist
# - install-dist: expression to run to install dist on the runner
#
# Typically there will be:
# - 1 "global" task that builds universal installers
# - N "local" tasks that build each platform's binaries and platform-specific installers
matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
runs-on: ${{ matrix.runner }}
container: ${{ matrix.container && matrix.container.image || null }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
steps:
- name: enable windows longpaths
run: |
git config --global core.longpaths true
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install Rust non-interactively if not already installed
if: ${{ matrix.container }}
run: |
if ! command -v cargo > /dev/null 2>&1; then
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
fi
- name: Install dist
run: ${{ matrix.install_dist.run }}
# Get the dist-manifest
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- name: Install dependencies
run: |
${{ matrix.packages_install }}
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
- name: Configure Cargo for Windows builds
if: runner.os == 'Windows'
shell: bash
run: |
# Put target dir somewhere short and less likely to be locked
echo "CARGO_TARGET_DIR=D:/cargo-target" >> $GITHUB_ENV
# Reduce file-handle pressure / parallel writes
echo "CARGO_BUILD_JOBS=2" >> $GITHUB_ENV
# Avoid incremental artifacts (less churn, fewer locks)
echo "CARGO_INCREMENTAL=0" >> $GITHUB_ENV
- name: Build artifacts
run: |
# Actually do builds and make zips and whatnot
dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
echo "dist ran successfully"
- id: cargo-dist
name: Post-build
# We force bash here just because github makes it really hard to get values up
# to "real" actions without writing to env-vars, and writing to env-vars has
# inconsistent syntax between shell and powershell.
shell: bash
run: |
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@v4
with:
name: artifacts-build-local-${{ join(matrix.targets, '_') }}
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- install-deps
- build-local-artifacts
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: cargo-dist
shell: bash
run: |
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@v4
with:
name: artifacts-build-global
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Determines if we should publish/announce
host:
needs:
- plan
- install-deps
- build-local-artifacts
- build-global-artifacts
# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: host
shell: bash
run: |
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
path: dist-manifest.json
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: artifacts
merge-multiple: true
- name: Cleanup
run: |
# Remove the granular manifests
rm -f artifacts/*-dist-manifest.json
- name: Create GitHub Release
env:
PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}"
ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}"
ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}"
RELEASE_COMMIT: "${{ github.sha }}"
run: |
# Write and read notes from a file to avoid quoting breaking things
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
publish-homebrew-formula:
needs:
- plan
- host
- install-deps
- build-local-artifacts
- build-global-artifacts
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PLAN: ${{ needs.plan.outputs.val }}
GITHUB_USER: "axo bot"
GITHUB_EMAIL: "[email protected]"
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: true
repository: "gnostr-org/homebrew-gnostr"
token: ${{ secrets.HOMEBREW_TAP_TOKEN }}
# So we have access to the formula
- name: Fetch homebrew formulae
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: Formula/
merge-multiple: true
# This is extra complex because you can make your Formula name not match your app name
# so we need to find releases with a *.rb file, and publish with that filename.
- name: Commit formula files
run: |
git config --global user.name "${GITHUB_USER}"
git config --global user.email "${GITHUB_EMAIL}"
for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do
filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output)
name=$(echo "$filename" | sed "s/\.rb$//")
version=$(echo "$release" | jq .app_version --raw-output)
export PATH="/home/linuxbrew/.linuxbrew/bin:$PATH"
brew update
# We avoid reformatting user-provided data such as the app description and homepage.
brew style --except-cops FormulaAudit/Homepage,FormulaAudit/Desc,FormulaAuditStrict --fix "Formula/${filename}" || true
git add "Formula/${filename}"
git commit -m "${name} ${version}"
done
git push
announce:
needs:
- plan
- install-deps
- build-local-artifacts
- build-global-artifacts
- host
- publish-homebrew-formula
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }}
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
/// Usage: cargo run --example cli-parser --features nostr
///
/// Stub entry point for builds without the `nostr` feature.
#[cfg(not(feature = "nostr"))]
fn main() {
    let hint = "Run with --features nostr to enable this example.";
    println!("{}", hint);
}
#[cfg(feature = "nostr")]
use clap::{Parser, Subcommand};
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::round1::{self, SigningCommitments, SigningNonces};
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand::SeedableRng;
#[cfg(feature = "nostr")]
use std::fs;
#[cfg(feature = "nostr")]
use std::path::PathBuf;
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
/// Top-level CLI definition for the `gnostr-frost` threshold-signature tool.
/// (A single `#[cfg]` gates the whole item; repeating the identical predicate
/// before every attribute is redundant.)
#[cfg(feature = "nostr")]
#[derive(Parser)]
#[command(name = "gnostr-frost")]
#[command(version = "0.1.0")]
#[command(about = "BIP-64MOD + GCC Threshold Signature Tool", long_about = None)]
struct Cli {
    // Selected subcommand; see `Commands` for the individual workflow steps.
    #[command(subcommand)]
    command: Commands,
}
// Subcommands for the five-step FROST workflow. NOTE: the `///` lines below
// are rendered by clap as `--help` text, so they are left untouched here;
// reviewer commentary is in `//` comments only.
#[cfg(feature = "nostr")]
#[derive(Subcommand)]
#[cfg(feature = "nostr")]
enum Commands {
    /// Step 1: Generate a new T-of-N key set (Dealer Mode)
    Keygen {
        // Minimum number of signers required to produce a signature (T).
        #[arg(long, default_value_t = 2)]
        threshold: u16,
        // Total number of key shares to deal (N).
        #[arg(long, default_value_t = 3)]
        total: u16,
        // Output directory for key files; the handler falls back to "." when absent.
        #[arg(short, long)]
        output_dir: Option<PathBuf>,
    },
    /// Step 2: Generate a batch of public/private nonces
    Batch {
        // Number of nonce/commitment pairs to pre-generate.
        #[arg(short, long, default_value_t = 10)]
        count: u16,
        // Path to this participant's key package JSON.
        #[arg(short, long)]
        key: PathBuf,
    },
    /// Step 3: Sign a message hash using a vaulted nonce index
    Sign {
        #[arg(short, long)]
        message: String,
        // Index into the nonce vault; the nonce is removed from the vault on use.
        #[arg(short, long)]
        index: u64,
        #[arg(short, long)]
        key: PathBuf,
        // Path to the secret nonce vault produced by `batch`.
        #[arg(short, long)]
        vault: PathBuf,
    },
    /// Step 4: Aggregate shares into a final BIP-340 signature
    Aggregate {
        #[arg(short, long)]
        message: String,
        // Paths to the participants' signature-share JSON files.
        #[arg(required = true)]
        shares: Vec<String>,
    },
    /// Step 5: Verify a BIP-340 signature against the group public key
    Verify {
        #[arg(short, long)]
        message: String,
        // Hex-encoded signature, as printed by `aggregate`.
        #[arg(short, long)]
        signature: String,
        // Path to the group public key JSON written by `keygen`.
        #[arg(short, long)]
        public_key: PathBuf,
    },
}
/// Secret signing nonces keyed by batch index; each participant keeps this private.
#[cfg(feature = "nostr")]
type NonceMap = BTreeMap<u32, SigningNonces>;
/// Public signing commitments keyed by the same batch index as `NonceMap`; shared with peers.
#[cfg(feature = "nostr")]
type CommitmentMap = BTreeMap<u32, SigningCommitments>;
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // CLI driver for the five-step FROST threshold-signing workflow:
    // Keygen -> Batch -> Sign -> Aggregate -> Verify. All state is exchanged
    // through JSON files in the current directory (or `output_dir` for keygen).
    let cli = Cli::parse();
    match &cli.command {
        Commands::Keygen { threshold, total, output_dir } => {
            println!("🛠️ Executing Keygen: {}-of-{}...", threshold, total);
            // Fresh OS entropy here (unlike the deterministic dealer example).
            let mut rng = ChaCha20Rng::from_entropy();
            let (shares, pubkey_package) = frost::keys::generate_with_dealer(
                *total, *threshold, IdentifierList::Default, &mut rng
            )?;
            let path = output_dir.as_deref().unwrap_or(std::path::Path::new("."));
            let pub_path = path.join("group_public.json");
            fs::write(&pub_path, serde_json::to_string_pretty(&pubkey_package)?)?;
            println!("✅ Saved Group Public Key to {:?}", pub_path);
            // Wrap each dealt share into a self-contained key package file
            // ("p<id-hex>_key.json") that later steps load directly.
            for (id, share) in shares {
                let key_pkg = frost::keys::KeyPackage::new(
                    id,
                    *share.signing_share(),
                    // NOTE(review): presumably derives the public verifying share
                    // from the secret scalar — confirm against the frost crate docs.
                    frost::keys::VerifyingShare::from(*share.signing_share()),
                    *pubkey_package.verifying_key(),
                    *threshold,
                );
                let id_hex = hex::encode(id.serialize());
                let file_name = format!("p{}_key.json", id_hex);
                fs::write(path.join(file_name), serde_json::to_string_pretty(&key_pkg)?)?;
            }
        }
        Commands::Batch { count, key } => {
            println!("📦 Executing Batch...");
            let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
            let mut rng = ChaCha20Rng::from_entropy();
            let mut public_commitments = CommitmentMap::new();
            let mut secret_nonce_vault = NonceMap::new();
            // Pre-generate `count` nonce/commitment pairs, indexed 0..count.
            for i in 0..*count {
                let (nonces, commitments) = round1::commit(key_pkg.signing_share(), &mut rng);
                public_commitments.insert(i as u32, commitments);
                secret_nonce_vault.insert(i as u32, nonces);
            }
            let id_hex = hex::encode(key_pkg.identifier().serialize());
            // The vault stays private; the commitments file is shared with peers.
            fs::write(format!("p{}_vault.json", id_hex), serde_json::to_string(&secret_nonce_vault)?)?;
            fs::write(format!("p{}_public_comms.json", id_hex), serde_json::to_string(&public_commitments)?)?;
            println!("✅ Nonces and Commitments saved for ID {}", id_hex);
        }
        Commands::Sign { message, index, key, vault } => {
            println!("✍️ Executing Sign: Index #{}...", index);
            let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
            let mut vault_data: NonceMap = serde_json::from_str(&fs::read_to_string(vault)?)?;
            // Remove the nonce and persist the shrunken vault BEFORE signing:
            // a FROST nonce must never be reused, even if signing fails later.
            let signing_nonces = vault_data.remove(&(*index as u32)).ok_or("Nonce not found!")?;
            fs::write(vault, serde_json::to_string(&vault_data)?)?;
            let mut commitments_map = BTreeMap::new();
            commitments_map.insert(*key_pkg.identifier(), *signing_nonces.commitments());
            // Discovery logic for peers: scan the CWD for p<id>_public_comms.json
            // files and pick each peer's commitment at the same nonce index.
            for entry in fs::read_dir(".")? {
                let path = entry?.path();
                let fname = path.file_name().unwrap().to_str().unwrap();
                if fname.starts_with('p') && fname.contains("_public_comms.json") {
                    let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_public_comms.json").unwrap();
                    // Identifiers serialize as JSON strings, hence the quote wrapping.
                    let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
                    if peer_id != *key_pkg.identifier() {
                        let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(&path)?)?;
                        if let Some(c) = peer_comms.get(&(*index as u32)) {
                            commitments_map.insert(peer_id, *c);
                        }
                    }
                }
            }
            let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
            let share = frost::round2::sign(&signing_package, &signing_nonces, &key_pkg)?;
            let share_file = format!("p{}_share.json", hex::encode(key_pkg.identifier().serialize()));
            fs::write(&share_file, serde_json::to_string(&share)?)?;
            println!("✅ Share saved to {}", share_file);
        }
        Commands::Aggregate { message, shares } => {
            println!("🧬 Executing Aggregate...");
            let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string("group_public.json")?)?;
            let mut commitments_map = BTreeMap::new();
            let mut signature_shares = BTreeMap::new();
            for share_path in shares {
                let share: frost::round2::SignatureShare = serde_json::from_str(&fs::read_to_string(share_path)?)?;
                // Recover the participant id from the "p<id-hex>_share.json" filename.
                let fname = std::path::Path::new(share_path).file_name().unwrap().to_str().unwrap();
                let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_share.json").unwrap();
                let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
                let comms_file = format!("p{}_public_comms.json", id_hex);
                let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(comms_file)?)?;
                // NOTE(review): commitment index 0 is hard-coded here, while `sign`
                // accepts an arbitrary --index; aggregation therefore only matches
                // shares produced with index 0 — confirm whether an index flag is needed.
                commitments_map.insert(peer_id, *peer_comms.get(&0).unwrap());
                signature_shares.insert(peer_id, share);
            }
            let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
            let group_sig = frost::aggregate(&signing_package, &signature_shares, &pubkey_package)?;
            let sig_hex = hex::encode(group_sig.serialize()?);
            println!("✅ Aggregation Successful!\nFinal BIP-340 Signature: {}", sig_hex);
            fs::write("final_signature.json", serde_json::to_string(&group_sig)?)?;
        }
        Commands::Verify { message, signature, public_key } => {
            println!("🔍 Executing Verify...");
            let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string(public_key)?)?;
            let sig_bytes = hex::decode(signature)?;
            let group_sig = frost::Signature::deserialize(&sig_bytes)?;
            match pubkey_package.verifying_key().verify(message.as_bytes(), &group_sig) {
                Ok(_) => println!("✅ SUCCESS: The signature is VALID!"),
                Err(_) => println!("❌ FAILURE: Invalid signature."),
            }
        }
    }
    Ok(())
}
/// Example: publish a NIP-34 Pull Request Update event (Kind 1619), once
/// without and once with a `build_manifest_event_id` reference.
#[cfg(feature = "nostr")]
#[tokio::main]
async fn main() {
    use get_file_hash_core::publish_pr_update;
    use nostr_sdk::Keys;
    use nostr_sdk::EventId;
    use std::str::FromStr;
    // Throwaway identity; a real deployment would load a persistent key.
    let keys = Keys::generate();
    let relay_urls = get_file_hash_core::get_relay_urls();
    let d_tag = "my-awesome-repo-example";
    let pr_event_id = EventId::from_str("f6e4d6a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9").unwrap(); // Example PR Event ID
    // NOTE(review): placeholder string — not valid hex for a real git SHA.
    let updated_commit_id = "z9y8x7w6v5u4t3s2r1q0p9o8n7m6l5k4j3i2h1g0";
    // Fix: this literal previously spanned two source lines, embedding a stray
    // leading newline into the clone URL published in the event.
    let updated_clone_url = "[email protected]:user/my-feature-branch-v2.git";
    // Dummy EventId for examples that require a build_manifest_event_id
    const DUMMY_BUILD_MANIFEST_ID_STR: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    let dummy_build_manifest_id = EventId::from_str(DUMMY_BUILD_MANIFEST_ID_STR).unwrap();
    // Example 1: Without build_manifest_event_id
    println!("Publishing PR update without build_manifest_event_id...");
    publish_pr_update!(
        &keys,
        &relay_urls,
        d_tag,
        &pr_event_id,
        updated_commit_id,
        updated_clone_url
    );
    println!("PR update without build_manifest_event_id published.");
    // Example 2: With build_manifest_event_id
    println!("Publishing PR update with build_manifest_event_id...");
    publish_pr_update!(
        &keys,
        &relay_urls,
        d_tag,
        &pr_event_id,
        updated_commit_id,
        updated_clone_url,
        Some(&dummy_build_manifest_id)
    );
    println!("PR update with build_manifest_event_id published.");
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when the example is built without the `nostr` feature:
// prints usage instructions instead of publishing anything.
fn main() {
println!("This example requires the 'nostr' feature. Please run with: cargo run --example publish_pr_update --features nostr");
}
[workspace]
members = [".", "src/get_file_hash_core", "n34", "n34-relay"]
[workspace.package]
version = "0.4.7"
edition = "2024"
license = "MIT"
authors = ["gnostr <[email protected]>"]
documentation = "https://github.com/gnostr-org/get_file_hash#readme"
homepage = "https://github.com/gnostr-org/get_file_hash"
repository = "https://github.com/gnostr-org/get_file_hash"
description = "A utility crate providing a procedural macro to compute and embed file hashes at compile time."
[package]
name = "get_file_hash"
version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
homepage.workspace = true
authors.workspace = true
license.workspace = true
[package.metadata.wix]
upgrade-guid = "DED69220-26E3-4406-B564-7F2B58C56F57"
path-guid = "8DB39A25-8B99-4C25-8CF5-4704353C7C6E"
license = false
eula = false
[features]
nostr = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
frost = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
gen-protos = []
[workspace.dependencies]
get_file_hash_core = { features = ["nostr"], path = "src/get_file_hash_core", version = "0.4.7" }
rand_chacha = "0.3"
sha2 = "0.11.0"
nostr = { version = "0.44.2", features = ["std", "nip46"] }
nostr-sdk = { version = "0.44.0", default-features = false, features = ["default"] }
hex = "0.4.2"
tokio = "1"
serde_json = "1.0"
csv = { version = "1.3.0", default-features = false }
url = "2.5.0"
reqwest = { version = "0.12.0", default-features = false }
tempfile = "3.27.0"
rand = "0.8"
frost-secp256k1-tr = "3.0.0-rc.0"
serial_test = { version = "3.4.0", features = ["test_logging"] }
log = "0.4"
n34 = { version = "0.4.0", path = "n34" }
n34-relay = { version = "0.1.1", path = "n34-relay" }
chrono = "0.4.41"
convert_case = "0.8.0"
dirs = "6.0.0"
easy-ext = "1.0.2"
either = "1.15.0"
futures = "0.3.31"
nostr-browser-signer-proxy = "0.43.0"
regex = "1.11.1"
thiserror = "2.0.12"
toml = "0.9.4"
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
[dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
nostr = { workspace = true, optional = true }
nostr-sdk = { workspace = true, optional = true }
hex = { workspace = true, optional = true }
tokio = { workspace = true, features = ["full"] }
frost-secp256k1-tr = { workspace = true }
rand = { workspace = true }
serde_json = { workspace = true }
rand_chacha = { workspace = true }
n34 = { workspace = true }
n34-relay = { workspace = true }
axum = { version = "0.8.6", features = ["http2", "ws"] }
base64 = "0.22.1"
chrono = "0.4.42"
config = { version = "0.15.15", default-features = false, features = ["toml"] }
const_format = "0.2.34"
convert_case = "0.8.0"
easy-ext = "1.0.2"
either = "1.15.0"
flume = "0.11.1"
futures = "0.3.31"
hyper = "1.7.0"
hyper-util = "0.1.17"
parking_lot = { version = "0.12.5", features = ["serde"] }
prost = "0.14.1"
serde = { version = "1.0.219", features = ["rc"] }
#serde_json = "1.0.145"
serde_with = "3.15.0"
sha1 = "0.10.6"
#sha2 = "0.10.9"
strum = { version = "0.27.2", features = ["derive"] }
thiserror = "2.0.16"
tokio-util = { version = "0.7.17", features = ["io"] }
toml = "0.9.5"
tonic-prost = "0.14.2"
tower = { version = "0.5.2", features = ["limit"] }
#tracing = "0.1.41"
#tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
dirs = "6.0.0"
rhai = { version = "1.23.4", features = [
"no_position",
"sync",
"serde",
"decimal",
] }
##tokio = { version = "1.47.1", features = [
## "macros",
## "rt-multi-thread",
## "signal",
## "fs",
## "process",
##] }
tonic = { version = "0.14.2", features = [
"tls-ring",
"tls-webpki-roots",
"gzip",
"deflate",
] }
tower-http = { version = "0.6.6", features = [
"cors",
"decompression-br",
"decompression-deflate",
"decompression-gzip",
"decompression-zstd",
"trace",
"timeout",
] }
[dependencies.clap]
features = ["derive"]
version = "4.5.42"
[dependencies.clap-verbosity-flag]
default-features = false
features = ["tracing"]
version = "3.0.3"
# We frequently switch between stable and unstable versions; this will make the
# process easier.
## [dependencies.nostr]
## default-features = false
## features = ["std"]
## git = "https://git.4rs.nl/mirrors/nostr.git"
## rev = "27a1947d3"
## # version = "0.45.0"
[dependencies.nostr-database]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-lmdb]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-relay-builder]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[build-dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
nostr = { workspace = true }
nostr-sdk = { workspace = true }
hex = { workspace = true }
tonic-prost-build = "0.14.2"
[target.'cfg(not(windows))'.build-dependencies]
protobuf-src = "2.1.0"
# The profile that 'dist' will build with
[profile.dist]
inherits = "release"
lto = "thin"
[dev-dependencies]
serial_test = { workspace = true }
[[example]]
name = "gnostr-build"
path = "examples/gnostr-build.rs"
required-features = ["nostr"]
[workspace]
members = ["cargo:."]
# Config for 'dist'
[dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.30.3"
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell", "homebrew", "msi"]
# A GitHub repo to push Homebrew formulas to
tap = "gnostr-org/homebrew-gnostr-org"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
# Path that installers should place binaries in
install-path = "CARGO_HOME"
# Publish jobs to run in CI
publish-jobs = ["homebrew"]
# Whether to install an updater program
install-updater = true
# Skip checking whether the specified configuration files are up to date
allow-dirty = ["ci"]
#[cfg(feature = "nostr")]
/// Example entry point: runs the FROST "mailbox" coordinator simulation from
/// the core crate and surfaces any error it produces.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    get_file_hash_core::frost_mailbox_logic::simulate_frost_mailbox_coordinator()?;
    Ok(())
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when the example is built without the `nostr` feature:
// prints usage instructions instead of running the simulation.
fn main() {
println!("This example requires the 'nostr' feature. Please run with: cargo run --example frost_mailbox --features nostr");
}
#[cfg(feature = "nostr")]
use get_file_hash_core::{get_relay_urls, publish_patch, publish_metadata_event, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL};
#[cfg(feature = "nostr")]
#[tokio::main]
/// Example: publish a NIP-01 metadata event followed by two NIP-34 Patch
/// events (Kind 1617), with and without a build-manifest reference.
async fn main() {
    use nostr_sdk::Keys;
    use nostr_sdk::EventId;
    use std::str::FromStr;
    // Fresh throwaway keypair for this example run.
    let signing_keys = Keys::generate();
    let relays = get_relay_urls();
    let repo_d_tag = "my-gnostr-repository-patch-with-metadata-example"; // Repository identifier
    let example_commit = "f1e2d3c4b5a6f7e8d9c0b1a2f3e4d5c6b7a8f9e0"; // Example commit ID
    // Inputs for the NIP-01 metadata event.
    let avatar = DEFAULT_PICTURE_URL;
    let banner = DEFAULT_BANNER_URL;
    let metadata_source = "./README.md"; // Using README.md content for metadata
    // Placeholder event id for the variant that references a build manifest.
    const MANIFEST_ID_HEX: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    let manifest_event_id = EventId::from_str(MANIFEST_ID_HEX).unwrap();
    println!("Publishing NIP-01 Metadata Event...");
    publish_metadata_event(&signing_keys, &relays, avatar, banner, metadata_source).await;
    println!("NIP-01 Metadata Event published.");
    // Variant 1: patch without a build-manifest reference.
    println!("\nPublishing NIP-34 Patch Event without build_manifest_event_id...");
    publish_patch!(
        &signing_keys,
        &relays,
        repo_d_tag,
        example_commit,
        "../Cargo.toml" // Use an existing file for the patch content
    );
    println!("NIP-34 Patch Event without build_manifest_event_id published.");
    // Variant 2: patch linking the build-manifest event.
    println!("\nPublishing NIP-34 Patch Event with build_manifest_event_id...");
    publish_patch!(
        &signing_keys,
        &relays,
        repo_d_tag,
        example_commit,
        "../Cargo.toml", // Use an existing file for the patch content
        Some(&manifest_event_id)
    );
    println!("NIP-34 Patch Event with build_manifest_event_id published.");
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when the example is built without the `nostr` feature:
// prints usage instructions instead of publishing anything.
fn main() {
println!("This example requires the 'nostr' feature. Please run with: cargo run --example publish_patch_with_metadata --features nostr");
}
/// deterministic nostr event build example
// deterministic nostr event build example
use get_file_hash_core::get_file_hash;
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use get_file_hash_core::{get_git_tracked_files, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use nostr_sdk::{EventBuilder, Keys, Tag, SecretKey};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use std::fs;
use std::path::PathBuf;
use sha2::{Digest, Sha256};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use ::hex;
#[cfg(feature = "gen-protos")]
/// Generates Rust gRPC bindings from the relay's protobuf definitions.
/// Only compiled under the `gen-protos` feature; requires `protoc` on PATH.
fn compile_protos() {
    let builder = tonic_prost_build::configure()
        .build_server(true)
        .build_client(true)
        .build_transport(true)
        // Required so proto3 `optional` fields are accepted by protoc.
        .protoc_arg("--experimental_allow_proto3_optional");
    builder
        .compile_protos(&["n34-relay/proto/plugins.proto"], &["n34-relay/proto"])
        .expect("protoc is required");
}
#[cfg(not(feature = "gen-protos"))]
// No-op stand-in so callers can unconditionally invoke `compile_protos()`
// when the `gen-protos` feature is disabled.
fn compile_protos() {}
#[tokio::main]
/// Build script entry point: injects git/package metadata and file hashes as
/// `cargo:rustc-env` variables, and — in release builds with the `nostr`
/// feature — mirrors git-tracked files to Nostr relays plus a linking
/// "build manifest" event and a NIP-34 repository announcement.
async fn main() {
    compile_protos();
    // Cargo always sets CARGO_MANIFEST_DIR for build scripts.
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let is_git_repo = std::path::Path::new(&manifest_dir).join(".git").exists();
    // Branch name captured here so the release/nostr section below can embed
    // it in the repository announcement.
    // Bug fix: the `if is_git_repo` arm used to *shadow* this binding with a
    // fresh `let`, so the outer value always stayed empty by the time the
    // announcement was built.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    #[allow(unused_mut)]
    let mut git_branch_str = String::new();
    println!("cargo:rustc-env=CARGO_PKG_NAME={}", env!("CARGO_PKG_NAME"));
    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", env!("CARGO_PKG_VERSION"));
    if is_git_repo {
        // Resolve the current commit hash via `git rev-parse HEAD`.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Resolve the current branch name via `git rev-parse --abbrev-ref HEAD`.
        let git_branch_output = std::process::Command::new("git")
            .args(&["rev-parse", "--abbrev-ref", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for branch name");
        let branch_name = if git_branch_output.status.success() && !git_branch_output.stdout.is_empty() {
            String::from_utf8(git_branch_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git branch command failed or returned empty. Status: {:?}, Stderr: {}",
                git_branch_output.status, String::from_utf8_lossy(&git_branch_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_BRANCH={}", branch_name);
        // Fix: propagate into the outer binding (instead of shadowing it) so
        // the release/nostr publishing section sees the real branch name.
        #[cfg(all(not(debug_assertions), feature = "nostr"))]
        {
            git_branch_str = branch_name;
        }
    } else {
        println!("cargo:rustc-env=GIT_COMMIT_HASH=");
        println!("cargo:rustc-env=GIT_BRANCH=");
    }
    println!("cargo:rerun-if-changed=.git/HEAD");
    // Embed content hashes of the key files so the binary can report exactly
    // which sources it was built from.
    let cargo_toml_hash = get_file_hash!("Cargo.toml");
    println!("cargo:rustc-env=CARGO_TOML_HASH={}", cargo_toml_hash);
    let lib_hash = get_file_hash!("src/lib.rs");
    println!("cargo:rustc-env=LIB_HASH={}", lib_hash);
    let build_hash = get_file_hash!("build.rs");
    println!("cargo:rustc-env=BUILD_HASH={}", build_hash);
    println!("cargo:rerun-if-changed=Cargo.toml");
    println!("cargo:rerun-if-changed=src/lib.rs");
    println!("cargo:rerun-if-changed=build.rs");
    let online_relays_csv_path = PathBuf::from(&manifest_dir).join("src/get_file_hash_core/src/online_relays_gps.csv");
    if online_relays_csv_path.exists() {
        println!("cargo:rerun-if-changed={}", online_relays_csv_path.to_str().unwrap());
    }
    // Release-only (and nostr-only) publishing section.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    if cfg!(not(debug_assertions)) { // redundant with the cfg above; kept as a guard
        println!("cargo:warning=Nostr feature enabled: Build may take longer due to network operations (publishing events to relays).");
        // This code only runs in release builds
        let package_version = std::env::var("CARGO_PKG_VERSION").unwrap();
        let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
        if let Err(e) = fs::create_dir_all(&output_dir) {
            println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
        }
        let files_to_publish: Vec<String> = get_git_tracked_files(&PathBuf::from(&manifest_dir));
        // Re-resolve the commit hash for use inside this cfg-gated block.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Left-pad the 40-hex git SHA-1 to 64 hex chars so it parses as a
        // 32-byte secp256k1 secret key.
        let padded_commit_hash = format!("{:0>64}", &git_commit_hash_str);
        println!("cargo:rustc-env=PADDED_COMMIT_HASH={}", padded_commit_hash);
        // Initialize client and keys once
        let initial_secret_key = SecretKey::parse(&padded_commit_hash).expect("Failed to create Nostr SecretKey from PADDED_COMMIT_HASH");
        let initial_keys = Keys::new(initial_secret_key);
        let mut client = nostr_sdk::Client::new(initial_keys.clone());
        let mut relay_urls = get_file_hash_core::get_relay_urls();
        // Add relays to the client
        for relay_url in relay_urls.iter() {
            if let Err(e) = client.add_relay(relay_url).await {
                println!("cargo:warning=Failed to add relay {}: {}", relay_url, e);
            }
        }
        client.connect().await;
        println!("cargo:warning=Added and connected to {} relays.", relay_urls.len());
        let mut published_event_ids: Vec<Tag> = Vec::new();
        let mut total_bytes_sent: usize = 0;
        for file_path_str in &files_to_publish {
            println!("cargo:warning=Processing file: {}", file_path_str);
            match fs::read(file_path_str) {
                Ok(bytes) => {
                    let mut hasher = Sha256::new();
                    hasher.update(&bytes);
                    let result = hasher.finalize();
                    let file_hash_hex = hex::encode(result);
                    // Each file's content hash doubles as its signing key, making
                    // the published event deterministic per file content.
                    match SecretKey::from_hex(&file_hash_hex.clone()) {
                        Ok(secret_key) => {
                            let keys = Keys::new(secret_key);
                            let content = String::from_utf8_lossy(&bytes).into_owned();
                            let tags = vec![
                                Tag::parse(["file", file_path_str].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                                Tag::parse(["version", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                            ];
                            let event_builder = EventBuilder::text_note(content).tags(tags);
                            if let Some(event_id) = publish_nostr_event_if_release(&mut client, file_hash_hex, keys.clone(), event_builder, &mut relay_urls, file_path_str, &output_dir, &mut total_bytes_sent).await {
                                published_event_ids.push(Tag::event(event_id));
                            }
                            // Publish metadata event
                            get_file_hash_core::publish_metadata_event(
                                &keys,
                                &relay_urls,
                                DEFAULT_PICTURE_URL,
                                DEFAULT_BANNER_URL,
                                file_path_str,
                            ).await;
                        }
                        Err(e) => {
                            println!("cargo:warning=Failed to derive Nostr secret key for {}: {}", file_path_str, e);
                        }
                    }
                }
                Err(e) => {
                    println!("cargo:warning=Failed to read file {}: {}", file_path_str, e);
                }
            }
        }
        // Create and publish the build_manifest linking all file events.
        if !published_event_ids.is_empty() {
            //TODO this will be either the default or detected from env vars PRIVATE_KEY
            let keys = Keys::new(SecretKey::from_hex(DEFAULT_GNOSTR_KEY).expect("Failed to create Nostr keys from DEFAULT_GNOSTR_KEY"));
            let cloned_keys = keys.clone();
            let content = format!("Build manifest for get_file_hash v{}", package_version);
            // NOTE(review): the identical tag is pushed four times; this looks
            // like a copy/paste artifact — confirm whether one tag suffices.
            let mut tags = vec![
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
            ];
            tags.extend(published_event_ids);
            let event_builder = EventBuilder::text_note(content.clone()).tags(tags);
            if let Some(event_id) = publish_nostr_event_if_release(
                &mut client,
                hex::encode(Sha256::digest(content.as_bytes())),
                keys,
                event_builder,
                &mut relay_urls,
                "build_manifest.json",
                &output_dir,
                &mut total_bytes_sent,
            ).await {
                let build_manifest_event_id = Some(event_id);
                // Publish metadata event for the build manifest
                get_file_hash_core::publish_metadata_event(
                    &cloned_keys, // Use reference to cloned keys here
                    &relay_urls,
                    DEFAULT_PICTURE_URL,
                    DEFAULT_BANNER_URL,
                    &format!("build_manifest:{}", package_version),
                ).await;
                let git_commit_hash = &git_commit_hash_str;
                // Now reflects the branch resolved above (previously always "").
                let git_branch = &git_branch_str;
                let repo_url = std::env::var("CARGO_PKG_REPOSITORY").unwrap();
                let repo_name = std::env::var("CARGO_PKG_NAME").unwrap();
                let repo_description = std::env::var("CARGO_PKG_DESCRIPTION").unwrap();
                let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
                if let Err(e) = fs::create_dir_all(&output_dir) {
                    println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
                }
                // The manifest's event id seeds the announcement identity, so the
                // announcement key is deterministic per manifest.
                let announcement_keys = Keys::new(SecretKey::from_hex(build_manifest_event_id.unwrap().to_hex().as_str()).expect("Failed to create Nostr keys from build_manifest_event_id"));
                let announcement_pubkey_hex = announcement_keys.public_key().to_string();
                // Publish NIP-34 Repository Announcement
                if let Some(_event_id) = get_repo_announcement_event(
                    &mut client,
                    &announcement_keys,
                    &relay_urls,
                    &repo_url,
                    &repo_name,
                    &repo_description,
                    &git_commit_hash,
                    &git_branch,
                    &output_dir,
                    &announcement_pubkey_hex
                ).await {
                    // Successfully published announcement
                }
            }
        }
        println!("cargo:warning=Total bytes sent to Nostr relays: {} bytes ({} MB)", total_bytes_sent, total_bytes_sent as f64 / 1024.0 / 1024.0);
    }
}
// deterministic nostr event build example
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost; // MUST use the -tr variant for BIP-340/Nostr
#[cfg(feature = "nostr")]
use rand::thread_rng;
#[cfg(feature = "nostr")]
use serde_json::json;
#[cfg(feature = "nostr")]
use sha2::{Digest, Sha256};
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
#[cfg(feature = "nostr")]
use hex;
#[cfg(feature = "nostr")]
/// Demonstrates producing a BIP-340 (Schnorr) threshold signature over a
/// Nostr event id using FROST with ROAST-style coordination (2-of-3).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut rng = thread_rng();
    let (total_signers, threshold) = (3, 2);
    // Nostr event metadata used to derive the NIP-01 event id.
    let pubkey_hex = "79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"; // Example
    let created_at = 1712050000;
    let kind = 1;
    let content = "Hello from ROAST threshold signatures!";
    // NIP-01: the event id is sha256 over this canonical JSON array.
    let serialized = json!([0, pubkey_hex, created_at, kind, [], content]).to_string();
    let event_id = Sha256::digest(serialized.as_bytes()); // 32-byte signing message
    // Trusted-dealer key generation for the 2-of-3 signer set.
    let (secret_shares, pubkey_package) = frost::keys::generate_with_dealer(
        total_signers,
        threshold,
        frost::keys::IdentifierList::Default,
        &mut rng,
    )?;
    // Round 1: the coordinator keeps a session open and collects commitments.
    // Signers 1 and 3 respond first (signer 2 is offline/slow).
    let mut commitments = BTreeMap::new();
    let mut nonces_by_id = BTreeMap::new();
    for raw_id in [1u16, 3] {
        let id = frost::Identifier::try_from(raw_id)?;
        let (nonces, comms) = frost::round1::commit(secret_shares[&id].signing_share(), &mut rng);
        commitments.insert(id, comms);
        nonces_by_id.insert(id, nonces);
    }
    // Round 2: each responsive participant signs the event id.
    let package = frost::SigningPackage::new(commitments, &event_id);
    let mut sig_shares = BTreeMap::new();
    for (id, nonces) in nonces_by_id {
        let key_package: frost::keys::KeyPackage = secret_shares[&id].clone().try_into()?;
        sig_shares.insert(id, frost::round2::sign(&package, &nonces, &key_package)?);
    }
    // Aggregate the shares into one BIP-340 signature, then verify it.
    let final_sig = frost::aggregate(&package, &sig_shares, &pubkey_package)?;
    pubkey_package.verifying_key().verify(&event_id, &final_sig)?;
    println!("Nostr Event ID: {}", hex::encode(event_id));
    println!("Threshold Signature (BIP-340): {}", hex::encode(final_sig.serialize()?));
    println!("Successfully signed Nostr event using ROAST/FROST!");
    Ok(())
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when the example is built without the `nostr` feature:
// prints usage instructions instead of running the signing demo.
fn main() {
println!("This example requires the 'nostr' feature. Please run with: cargo run --example frost_bip_340 --features nostr");
}
#[cfg(feature = "nostr")]
use get_file_hash_core::{get_git_tracked_files, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event, publish_patch_event};
#[cfg(feature = "nostr")]
#[tokio::main]
/// Example: publish a NIP-34 Patch event (Kind 1617), once without and once
/// with a `build_manifest_event_id` reference.
async fn main() {
    use get_file_hash_core::publish_patch;
    use nostr_sdk::Keys;
    use nostr_sdk::EventId;
    use std::str::FromStr;
    // Throwaway keypair for this example run.
    let signing_keys = Keys::generate();
    let relays = get_file_hash_core::get_relay_urls();
    let repo_d_tag = "my-awesome-repo-example";
    let example_commit = "a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0"; // Example commit ID
    // Placeholder event id used by the build-manifest variant below.
    const MANIFEST_ID_HEX: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    let manifest_event_id = EventId::from_str(MANIFEST_ID_HEX).unwrap();
    // Variant 1: patch without a build-manifest reference.
    println!("Publishing patch without build_manifest_event_id...");
    publish_patch!(
        &signing_keys,
        &relays,
        repo_d_tag,
        example_commit,
        "../Cargo.toml" // Use an existing file for the patch content
    );
    println!("Patch without build_manifest_event_id published.");
    // Variant 2: patch linking the build-manifest event.
    println!("Publishing patch with build_manifest_event_id...");
    publish_patch!(
        &signing_keys,
        &relays,
        repo_d_tag,
        example_commit,
        "../Cargo.toml", // Use an existing file for the patch content
        Some(&manifest_event_id)
    );
    println!("Patch with build_manifest_event_id published.");
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when the example is built without the `nostr` feature:
// prints usage instructions instead of publishing anything.
fn main() {
println!("This example requires the 'nostr' feature. Please run with: cargo run --example publish_patch --features nostr");
}
# `build.rs` Documentation
This document explains the functionality of the `build.rs` script in this project. The `build.rs` script is a special Rust file that, if present, Cargo will compile and run *before* compiling the rest of your package. It's typically used for tasks that need to be performed during the build process, such as generating code, setting environment variables, or performing conditional compilation.
## Core Functionality
The `build.rs` script in this project performs the following key functions:
1. **Environment Variable Injection:** It computes various project-related values at compile time and injects them as environment variables (via `cargo:rustc-env=VAR=VALUE` directives) that can be accessed by the main crate using `env!("VAR_NAME")`. This includes:
* `CARGO_PKG_NAME`: The name of the current package (from `Cargo.toml`).
* `CARGO_PKG_VERSION`: The version of the current package (from `Cargo.toml`).
* `GIT_COMMIT_HASH`: The full commit hash of the current Git HEAD (if in a Git repository).
* `GIT_BRANCH`: The name of the current Git branch (if in a Git repository).
* `CARGO_TOML_HASH`: The SHA-256 hash of the `Cargo.toml` file.
* `LIB_HASH`: The SHA-256 hash of the `src/lib.rs` file.
* `BUILD_HASH`: The SHA-256 hash of the `build.rs` file itself.
2. **Rerun Conditions:** It tells Cargo when to re-run the build script. This ensures that the injected environment variables and any conditional compilation logic are up-to-date if relevant files change:
* `Cargo.toml`
* `src/lib.rs`
* `build.rs`
* `.git/HEAD` (to detect changes in the Git repository like new commits or branch switches).
* `src/get_file_hash_core/src/online_relays_gps.csv` (conditionally, if the file exists).
3. **Conditional Nostr Event Publishing (Release Builds with `nostr` feature):**
If the project is being compiled in **release mode (`--release`)** and the **`nostr` feature is enabled (`--features nostr`)**, the `build.rs` script will connect to Nostr relays and publish events. This is intended for "deterministic Nostr event build examples" as indicated by the comments in the file.
* **Relay Management:** It retrieves a list of default relay URLs. During event publishing, it identifies and removes "unfriendly" or unresponsive relays (e.g., those with timeout, connection issues, or spam blocks) from the list for subsequent publications.
* **File Hashing and Key Generation:** For each Git-tracked file (when in a Git repository), it computes its SHA-256 hash. This hash is then used to derive a Nostr `SecretKey`.
* **Event Creation:**
* **Individual File Events:** For each Git-tracked file, a Nostr `text_note` event is created. This event includes tags for:
* `#file`: The path of the file.
* `#version`: The package version.
* `#commit`: The Git commit hash (if in a Git repository).
* `#branch`: The Git branch name (if in a Git repository).
* **Metadata Event:** It publishes a metadata event using `get_file_hash_core::publish_metadata_event`.
* **Linking Event (Build Manifest):** After processing all individual files, if any events were published, a final "build manifest" `text_note` event is created. This event links to all the individual file events that were published during the build using event tags.
* **Output Storage:** The JSON representation of successfully published Nostr events (specifically the `EventId`) is saved to `~/.gnostr/build/{package_version}/{file_path_str_sanitized}/{hash}/{public_key}/{event_id}.json`. This provides a local record of what was published.
### `publish_nostr_event_if_release` Function
This asynchronous helper function is responsible for:
* Adding relays to the Nostr client.
* Connecting to relays.
* Signing the provided `EventBuilder` to create an `Event`.
* Sending the event to the configured relays.
* Logging success or failure for each relay.
* Identifying and removing unresponsive relays from the `relay_urls` list.
* Saving the published event's JSON to the local filesystem.
### `should_remove_relay` Function
This helper function determines if a relay should be considered "unfriendly" or unresponsive based on common error messages received during Nostr event publication.
## Usage
To prevent 'Too many open files' errors, especially during builds and tests involving numerous file operations or subprocesses (like `git ls-files` or parallel test execution), it may be necessary to increase the file descriptor limit.
* **For local development**: Run `ulimit -n 4096` in your terminal session before executing `cargo build` or `cargo test`. This setting is session-specific.
* **For CI environments**: The `.github/workflows/rust.yml` workflow is configured to set `ulimit -n 4096` for relevant test steps to ensure consistent execution.
The values set by `build.rs` can be accessed in your Rust code (e.g., `src/lib.rs`) at compile time using the `env!` macro. For example:
```rust
pub const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
```
The Nostr event publishing functionality of `build.rs` is primarily for release builds with the `nostr` feature enabled, allowing for the automatic, deterministic publication of project state to the Nostr network as part of the CI/CD pipeline.
## Example Commands
To interact with the `build.rs` script's features, especially those related to Nostr event publishing, you can use the following `cargo` commands:
* **Build in release mode with Nostr feature (verbose output):**
```bash
cargo build --release --workspace --features nostr -vv
```
* **Run tests for `get_file_hash_core` sequentially with Nostr feature and verbose logging (as in CI):**
```bash
RUST_LOG=info,nostr_sdk=debug,frost=debug cargo test -p get_file_hash_core --features nostr -- --test-threads 1 --nocapture
```
* **Run all workspace tests in release mode with Nostr feature:**
```bash
cargo test --workspace --release --features nostr
```
* **Build `get_file_hash_core` in release mode with Nostr feature (very verbose output):**
```bash
cargo build --release --features nostr -vv -p get_file_hash_core
```
* **Run `get_file_hash_core` tests in release mode with Nostr feature (very verbose output):**
```bash
cargo test --release --features nostr -vv -p get_file_hash_core
```
# `get_file_hash` macro
This project provides a Rust procedural macro, `get_file_hash!`, designed to compute the SHA-256 hash of a specified file at compile time. This hash is then embedded directly into your compiled executable. This feature is invaluable for:
* **Integrity Verification:** Ensuring the deployed code hasn't been tampered with.
* **Versioning:** Embedding a unique identifier linked to the exact source code version.
* **Cache Busting:** Generating unique names for assets based on their content.
## Project Structure
* `get_file_hash_core`: A foundational crate containing the `get_file_hash!` macro definition.
* `get_file_hash`: The main library crate that re-exports the macro.
* `src/bin/get_file_hash.rs`: An example executable demonstrating the macro's usage by hashing its own source file and updating this `README.md`.
* `build.rs`: A build script that also utilizes the `get_file_hash!` macro to hash `Cargo.toml` during the build process.
## Usage of `get_file_hash!` Macro
To use the `get_file_hash!` macro, ensure you have `get_file_hash` (or `get_file_hash_core` for direct usage) as a dependency in your `Cargo.toml`.
### Example
```rust
use get_file_hash::get_file_hash;
use get_file_hash::CARGO_TOML_HASH;
use sha2::{Digest, Sha256};
fn main() {
// The macro resolves the path relative to CARGO_MANIFEST_DIR
let readme_hash = get_file_hash!("src/bin/readme.rs");
let lib_hash = get_file_hash!("src/lib.rs");
println!("The SHA-256 hash of src/lib.rs is: {}", lib_hash);
println!("The SHA-256 hash of src/bin/readme.rs is: {}", readme_hash);
println!("The SHA-256 hash of Cargo.toml is: {}", CARGO_TOML_HASH);
}
```
## Release
## [`README.md`](./README.md)
```bash
cargo run --bin readme > README.md
```
## [`src/bin/readme.rs`](src/bin/readme.rs)
* **Target File:** `src/bin/readme.rs`
## NIP-34 Integration: Git Repository Events on Nostr
This library provides a set of powerful macros and functions for integrating Git repository events with the Nostr protocol, adhering to the [NIP-34: Git Repositories on Nostr](https://github.com/nostr-protocol/nips/blob/master/34.md) specification.
These tools allow you to publish various Git-related events to Nostr relays, enabling decentralized tracking and collaboration for your code repositories.
### Available NIP-34 Macros
Each macro provides a convenient way to publish specific NIP-34 event kinds:
* [`repository_announcement!`](#repository_announcement)
* Publishes a `Repository Announcement` event (Kind 30617) to announce a new or updated Git repository.
* [`publish_patch!`](#publish_patch)
* Publishes a `Patch` event (Kind 1617) containing a Git patch (diff) for a specific commit.
* [`publish_pull_request!`](#publish_pull_request)
* Publishes a `Pull Request` event (Kind 1618) to propose changes and facilitate code review.
* [`publish_pr_update!`](#publish_pr_update)
* Publishes a `Pull Request Update` event (Kind 1619) to update an existing pull request.
* [`publish_repository_state!`](#publish_repository_state)
* Publishes a `Repository State` event (Kind 1620) to announce the current state of a branch (e.g., its latest commit).
* [`publish_issue!`](#publish_issue)
* Publishes an `Issue` event (Kind 1621) to report bugs, request features, or track tasks.
### Running NIP-34 Examples
To see these macros in action, navigate to the `examples/` directory and run each example individually with the `nostr` feature enabled:
```bash
cargo run --example repository_announcement --features nostr
cargo run --example publish_patch --features nostr
cargo run --example publish_pull_request --features nostr
cargo run --example publish_pr_update --features nostr
cargo run --example publish_repository_state --features nostr
cargo run --example publish_issue --features nostr
```
* **SHA-256 Hash:** 6c6325c5a4c14f44cbda6ca53179ab3d6666ce7c916365668c6dd1d79215db59
* **Status:** Integrity Verified..
##
## [`build.rs`](build.rs)
* **Target File:** `build.rs`
* **SHA-256 Hash:** 20c958c8cbb5c77cf5eb3763b6da149b61241d328df52d39b7aa97903305c889
* **Status:** Integrity Verified..
##
## [`Cargo.toml`](Cargo.toml)
* **Target File:** `Cargo.toml`
* **SHA-256 Hash:** e3f392bf23b5fb40902acd313a8c76d1943060b6805ea8615de62f9baf0c6513
* **Status:** Integrity Verified..
##
## [`src/lib.rs`](src/lib.rs)
* **Target File:** `src/lib.rs`
* **SHA-256 Hash:** 591593482a6c9aac8793aa1e488e613f52a4effb1ec3465fd9d6a54537f2b123
* **Status:** Integrity Verified..
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand_chacha::rand_core::SeedableRng;
#[cfg(feature = "nostr")]
use hex;
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
/// Deterministic FROST trusted-dealer demo: seeds ChaCha20 from a fixed
/// SHA-256 constant so every run reproduces the identical 2-of-3 share set,
/// verifies each share against the dealer's VSS commitment, then prints the
/// group public key in compressed and x-only hex forms.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fixed 32-byte seed (hex): the empty-blob SHA-256 constant, so the
    // generated shares are stable across runs.
    let seed_hex = "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813";
    let decoded = hex::decode(seed_hex)?;
    let seed: [u8; 32] = decoded.try_into().map_err(|_| "Invalid seed length")?;
    let mut rng = ChaCha20Rng::from_seed(seed);

    let max_signers = 3;
    let min_signers = 2;

    ////////////////////////////////////////////////////////////////////////////
    // Round 0: Key Generation (Trusted Dealer)
    ////////////////////////////////////////////////////////////////////////////
    // IdentifierList::Default assigns participant identifiers 1, 2, 3, ...
    let (shares, pubkey_package) = frost::keys::generate_with_dealer(
        max_signers,
        min_signers,
        IdentifierList::Default,
        &mut rng,
    )?;

    println!("--- Deterministic FROST Dealer ---");
    println!("Threshold: {} of {}", min_signers, max_signers);
    println!("Number of shares generated: {}", shares.len());
    println!("\n--- Verifying Shares Against Commitments ---");

    // Because the seed is fixed, the secret signing shares (the private
    // scalars f(x)) are the same on every run. `verify` checks each share
    // against the dealer's VSS commitment.
    for (identifier, share) in &shares {
        if let Err(e) = share.verify() {
            println!("Participant {:?}: INVALID! ❌ Error: {:?}", identifier, e);
        } else {
            println!("Participant {:?}: Valid ✅", identifier);
        }
    }

    let pubkey_bytes = pubkey_package.verifying_key().serialize()?;
    println!("Group Public Key (Hex Compressed): {}", hex::encode(&pubkey_bytes));
    // Dropping the leading parity byte yields the x-only (BIP-340 style) key.
    let x_only_hex = hex::encode(&pubkey_bytes[1..]);
    println!("Group Public Key (Hex X-Only): {}", x_only_hex);
    Ok(())
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point: the FROST dealer example only compiles with `--features nostr`.
fn main() {
    println!("Run with --features nostr to enable this example.");
}
#[cfg(feature = "nostr")]
use get_file_hash_core::{get_relay_urls, publish_issue, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event, publish_patch_event};
#[cfg(feature = "nostr")]
#[tokio::main]
/// Example: publishes two NIP-34 Issue events (Kind 1621) via the
/// `publish_issue!` macro — the first without, the second with, an optional
/// linked build-manifest event id.
async fn main() {
    use nostr_sdk::Keys;
    use nostr_sdk::EventId;
    use std::str::FromStr;
    // Ephemeral keypair for the demo; a real deployment would load a
    // persistent identity instead.
    let keys = Keys::generate();
    let relay_urls = get_relay_urls();
    let d_tag = "my-gnostr-repository-issue-example"; // Repository identifier
    let issue_id_1 = "issue-001"; // Unique identifier for the first issue
    let issue_id_2 = "issue-002"; // Unique identifier for the second issue
    let title_1 = "Bug: Application crashes on startup";
    // Multi-line issue bodies: the embedded newlines are part of the
    // published content and must be preserved verbatim.
    let content_1 = "The application fails to launch on macOS Ventura. It throws a 'Segmentation Fault' error immediately after execution. This was observed on version `v1.2.3`.
Steps to reproduce:
1. Download `app-v1.2.3-macos.tar.gz`
2. Extract the archive
3. Run `./app`
Expected behavior: Application launches successfully.
Actual behavior: Application crashes with 'Segmentation Fault'.";
    let title_2 = "Feature Request: Dark Mode";
    let content_2 = "Users have requested a dark mode option to improve readability and reduce eye strain during prolonged use. This should be toggleable in the settings menu.
Considerations:
- Adherence to system dark mode settings.
- Consistent styling across all UI components.";
    // Dummy EventId for examples that require a build_manifest_event_id
    const DUMMY_BUILD_MANIFEST_ID_STR: &str = "f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0";
    // 64 hex chars — syntactically valid, so unwrap cannot fail here.
    let dummy_build_manifest_id = EventId::from_str(DUMMY_BUILD_MANIFEST_ID_STR).unwrap();
    // Example 1: Publish an issue without build_manifest_event_id
    println!("Publishing issue '{}' without build_manifest_event_id...", title_1);
    publish_issue!(
        &keys,
        &relay_urls,
        d_tag,
        issue_id_1,
        title_1,
        content_1
    );
    println!("Issue '{}' published.", title_1);
    // Example 2: Publish an issue with build_manifest_event_id
    // (same macro, extra trailing Option argument).
    println!("Publishing issue '{}' with build_manifest_event_id...", title_2);
    publish_issue!(
        &keys,
        &relay_urls,
        d_tag,
        issue_id_2,
        title_2,
        content_2,
        Some(&dummy_build_manifest_id)
    );
    println!("Issue '{}' published.", title_2);
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when built without the `nostr` feature.
fn main() {
    println!("This example requires the 'nostr' feature. Please run with: cargo run --example publish_issue --features nostr");
}
[workspace]
members = [".", "src/get_file_hash_core", "n34", "n34-relay"]
[workspace.package]
version = "0.4.7"
edition = "2024"
license = "MIT"
authors = ["gnostr <[email protected]>"]
documentation = "https://github.com/gnostr-org/get_file_hash#readme"
homepage = "https://github.com/gnostr-org/get_file_hash"
repository = "https://github.com/gnostr-org/get_file_hash"
description = "A utility crate providing a procedural macro to compute and embed file hashes at compile time."
[package]
name = "get_file_hash"
version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
homepage.workspace = true
authors.workspace = true
license.workspace = true
[package.metadata.wix]
upgrade-guid = "DED69220-26E3-4406-B564-7F2B58C56F57"
path-guid = "8DB39A25-8B99-4C25-8CF5-4704353C7C6E"
license = false
eula = false
[features]
nostr = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
frost = ["dep:nostr", "dep:nostr-sdk", "dep:hex"]
gen-protos = []
[workspace.dependencies]
get_file_hash_core = { features = ["nostr"], path = "src/get_file_hash_core", version = "0.4.7" }
rand_chacha = "0.3"
sha2 = "0.11.0"
nostr = { version = "0.44.2", features = ["std", "nip46"] }
nostr-sdk = { version = "0.44.0", default-features = false, features = ["default"] }
hex = "0.4.2"
tokio = "1"
serde_json = "1.0"
csv = { version = "1.3.0", default-features = false }
url = "2.5.0"
reqwest = { version = "0.12.0", default-features = false }
tempfile = "3.27.0"
rand = "0.8"
frost-secp256k1-tr = "3.0.0-rc.0"
serial_test = { version = "3.4.0", features = ["test_logging"] }
log = "0.4"
n34 = { version = "0.4.0", path = "n34" }
n34-relay = { version = "0.1.1", path = "n34-relay" }
chrono = "0.4.41"
convert_case = "0.8.0"
dirs = "6.0.0"
easy-ext = "1.0.2"
either = "1.15.0"
futures = "0.3.31"
nostr-browser-signer-proxy = "0.43.0"
regex = "1.11.1"
thiserror = "2.0.12"
toml = "0.9.4"
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
[dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
nostr = { workspace = true, optional = true }
nostr-sdk = { workspace = true, optional = true }
hex = { workspace = true, optional = true }
tokio = { workspace = true, features = ["full"] }
frost-secp256k1-tr = { workspace = true }
rand = { workspace = true }
serde_json = { workspace = true }
rand_chacha = { workspace = true }
n34 = { workspace = true }
n34-relay = { workspace = true }
axum = { version = "0.8.6", features = ["http2", "ws"] }
base64 = "0.22.1"
chrono = "0.4.42"
config = { version = "0.15.15", default-features = false, features = ["toml"] }
const_format = "0.2.34"
convert_case = "0.8.0"
easy-ext = "1.0.2"
either = "1.15.0"
flume = "0.11.1"
futures = "0.3.31"
hyper = "1.7.0"
hyper-util = "0.1.17"
parking_lot = { version = "0.12.5", features = ["serde"] }
prost = "0.14.1"
serde = { version = "1.0.219", features = ["rc"] }
#serde_json = "1.0.145"
serde_with = "3.15.0"
sha1 = "0.10.6"
#sha2 = "0.10.9"
strum = { version = "0.27.2", features = ["derive"] }
thiserror = "2.0.16"
tokio-util = { version = "0.7.17", features = ["io"] }
toml = "0.9.5"
tonic-prost = "0.14.2"
tower = { version = "0.5.2", features = ["limit"] }
#tracing = "0.1.41"
#tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
dirs = "6.0.0"
rhai = { version = "1.23.4", features = [
"no_position",
"sync",
"serde",
"decimal",
] }
##tokio = { version = "1.47.1", features = [
## "macros",
## "rt-multi-thread",
## "signal",
## "fs",
## "process",
##] }
tonic = { version = "0.14.2", features = [
"tls-ring",
"tls-webpki-roots",
"gzip",
"deflate",
] }
tower-http = { version = "0.6.6", features = [
"cors",
"decompression-br",
"decompression-deflate",
"decompression-gzip",
"decompression-zstd",
"trace",
"timeout",
] }
[dependencies.clap]
features = ["derive"]
version = "4.5.42"
[dependencies.clap-verbosity-flag]
default-features = false
features = ["tracing"]
version = "3.0.3"
# We frequently switch between stable and unstable versions; this will make the
# process easier.
## [dependencies.nostr]
## default-features = false
## features = ["std"]
## git = "https://git.4rs.nl/mirrors/nostr.git"
## rev = "27a1947d3"
## # version = "0.45.0"
[dependencies.nostr-database]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-lmdb]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[dependencies.nostr-relay-builder]
default-features = false
git = "https://git.4rs.nl/mirrors/nostr.git"
rev = "27a1947d3"
# version = "0.45.0"
[build-dependencies]
get_file_hash_core = { workspace = true, features = ["nostr"] }
sha2 = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
nostr = { workspace = true }
nostr-sdk = { workspace = true }
hex = { workspace = true }
tonic-prost-build = "0.14.2"
[target.'cfg(not(windows))'.build-dependencies]
protobuf-src = "2.1.0"
# The profile that 'dist' will build with
[profile.dist]
inherits = "release"
lto = "thin"
[dev-dependencies]
serial_test = { workspace = true }
[[example]]
name = "gnostr-build"
path = "examples/gnostr-build.rs"
required-features = ["nostr"]
/// Usage: cargo run --example cli-parser --features nostr
// Fallback entry point: the CLI tool only compiles with the `nostr` feature.
#[cfg(not(feature = "nostr"))]
fn main() {
    println!("Run with --features nostr to enable this example.");
}
#[cfg(feature = "nostr")]
use clap::{Parser, Subcommand};
#[cfg(feature = "nostr")]
use frost_secp256k1_tr as frost;
#[cfg(feature = "nostr")]
use frost::round1::{self, SigningCommitments, SigningNonces};
#[cfg(feature = "nostr")]
use frost::keys::IdentifierList;
#[cfg(feature = "nostr")]
use rand_chacha::ChaCha20Rng;
#[cfg(feature = "nostr")]
use rand::SeedableRng;
#[cfg(feature = "nostr")]
use std::fs;
#[cfg(feature = "nostr")]
use std::path::PathBuf;
#[cfg(feature = "nostr")]
use std::collections::BTreeMap;
// Command-line interface root for the gnostr-frost threshold-signature tool.
// A single #[cfg] gate is sufficient for the whole item; the previous code
// redundantly repeated #[cfg(feature = "nostr")] before every attribute.
// (Plain `//` comments are used deliberately: `///` doc comments would feed
// clap's help text and change --help output.)
#[cfg(feature = "nostr")]
#[derive(Parser)]
#[command(name = "gnostr-frost")]
#[command(version = "0.1.0")]
#[command(about = "BIP-64MOD + GCC Threshold Signature Tool", long_about = None)]
struct Cli {
    // The subcommand to execute: Keygen, Batch, Sign, Aggregate, or Verify.
    #[command(subcommand)]
    command: Commands,
}
// Subcommands of the five-step FROST workflow. One #[cfg] gate replaces the
// redundant repeated gates of the original. The existing `///` variant docs
// are kept byte-for-byte because clap surfaces them as subcommand help text.
#[cfg(feature = "nostr")]
#[derive(Subcommand)]
enum Commands {
    /// Step 1: Generate a new T-of-N key set (Dealer Mode)
    Keygen {
        #[arg(long, default_value_t = 2)]
        threshold: u16,
        #[arg(long, default_value_t = 3)]
        total: u16,
        // Where to write group_public.json and the per-participant key files;
        // defaults to the current directory when omitted.
        #[arg(short, long)]
        output_dir: Option<PathBuf>,
    },
    /// Step 2: Generate a batch of public/private nonces
    Batch {
        #[arg(short, long, default_value_t = 10)]
        count: u16,
        // Path to this participant's KeyPackage JSON file.
        #[arg(short, long)]
        key: PathBuf,
    },
    /// Step 3: Sign a message hash using a vaulted nonce index
    Sign {
        #[arg(short, long)]
        message: String,
        // Batch index of the nonce to consume from the vault.
        #[arg(short, long)]
        index: u64,
        #[arg(short, long)]
        key: PathBuf,
        #[arg(short, long)]
        vault: PathBuf,
    },
    /// Step 4: Aggregate shares into a final BIP-340 signature
    Aggregate {
        #[arg(short, long)]
        message: String,
        // Positional list of p{id}_share.json files, at least one required.
        #[arg(required = true)]
        shares: Vec<String>,
    },
    /// Step 5: Verify a BIP-340 signature against the group public key
    Verify {
        #[arg(short, long)]
        message: String,
        // Hex-encoded BIP-340 signature.
        #[arg(short, long)]
        signature: String,
        // Path to the group_public.json produced by Keygen.
        #[arg(short, long)]
        public_key: PathBuf,
    },
}
#[cfg(feature = "nostr")]
// Batch index -> this participant's secret signing nonces (private, vaulted on disk).
type NonceMap = BTreeMap<u32, SigningNonces>;
#[cfg(feature = "nostr")]
// Batch index -> the corresponding public commitments (shareable with peers).
type CommitmentMap = BTreeMap<u32, SigningCommitments>;
#[cfg(feature = "nostr")]
/// CLI entry point dispatching the five-step FROST threshold-signature
/// workflow: Keygen -> Batch -> Sign -> Aggregate -> Verify.
/// All state is exchanged through JSON files; peers are discovered on disk
/// via the `p{id_hex}_*.json` naming convention.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let cli = Cli::parse();
    match &cli.command {
        Commands::Keygen { threshold, total, output_dir } => {
            println!("🛠️ Executing Keygen: {}-of-{}...", threshold, total);
            // Fresh OS entropy: unlike the deterministic examples, real
            // keygen must not use a fixed seed.
            let mut rng = ChaCha20Rng::from_entropy();
            let (shares, pubkey_package) = frost::keys::generate_with_dealer(
                *total, *threshold, IdentifierList::Default, &mut rng
            )?;
            // Default to the current directory when --output-dir is omitted.
            let path = output_dir.as_deref().unwrap_or(std::path::Path::new("."));
            let pub_path = path.join("group_public.json");
            fs::write(&pub_path, serde_json::to_string_pretty(&pubkey_package)?)?;
            println!("✅ Saved Group Public Key to {:?}", pub_path);
            // Emit one KeyPackage file per participant: p{id_hex}_key.json.
            for (id, share) in shares {
                // NOTE(review): the verifying share is re-derived here from the
                // SECRET share via VerifyingShare::from, rather than taken from
                // the dealer output (e.g. KeyPackage::try_from(share)) — confirm
                // this matches the dealer's VSS commitment.
                let key_pkg = frost::keys::KeyPackage::new(
                    id,
                    *share.signing_share(),
                    frost::keys::VerifyingShare::from(*share.signing_share()),
                    *pubkey_package.verifying_key(),
                    *threshold,
                );
                let id_hex = hex::encode(id.serialize());
                let file_name = format!("p{}_key.json", id_hex);
                fs::write(path.join(file_name), serde_json::to_string_pretty(&key_pkg)?)?;
            }
        }
        Commands::Batch { count, key } => {
            println!("📦 Executing Batch...");
            let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
            let mut rng = ChaCha20Rng::from_entropy();
            let mut public_commitments = CommitmentMap::new();
            let mut secret_nonce_vault = NonceMap::new();
            // Pre-generate `count` nonce/commitment pairs, keyed by batch index.
            for i in 0..*count {
                let (nonces, commitments) = round1::commit(key_pkg.signing_share(), &mut rng);
                public_commitments.insert(i as u32, commitments);
                secret_nonce_vault.insert(i as u32, nonces);
            }
            let id_hex = hex::encode(key_pkg.identifier().serialize());
            // The vault file holds SECRET nonces; the comms file is shareable.
            fs::write(format!("p{}_vault.json", id_hex), serde_json::to_string(&secret_nonce_vault)?)?;
            fs::write(format!("p{}_public_comms.json", id_hex), serde_json::to_string(&public_commitments)?)?;
            println!("✅ Nonces and Commitments saved for ID {}", id_hex);
        }
        Commands::Sign { message, index, key, vault } => {
            println!("✍️ Executing Sign: Index #{}...", index);
            let key_pkg: frost::keys::KeyPackage = serde_json::from_str(&fs::read_to_string(key)?)?;
            let mut vault_data: NonceMap = serde_json::from_str(&fs::read_to_string(vault)?)?;
            // Remove the nonce and rewrite the vault BEFORE signing, so the
            // nonce can never be reused even if a later step fails.
            let signing_nonces = vault_data.remove(&(*index as u32)).ok_or("Nonce not found!")?;
            fs::write(vault, serde_json::to_string(&vault_data)?)?;
            let mut commitments_map = BTreeMap::new();
            commitments_map.insert(*key_pkg.identifier(), *signing_nonces.commitments());
            // Discovery logic for peers: scan the cwd for p{id}_public_comms.json
            // and collect each peer's commitment at the same batch index.
            for entry in fs::read_dir(".")? {
                let path = entry?.path();
                let fname = path.file_name().unwrap().to_str().unwrap();
                if fname.starts_with('p') && fname.contains("_public_comms.json") {
                    let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_public_comms.json").unwrap();
                    // Identifier round-trips through its JSON (quoted hex) form.
                    let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
                    if peer_id != *key_pkg.identifier() {
                        let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(&path)?)?;
                        if let Some(c) = peer_comms.get(&(*index as u32)) {
                            commitments_map.insert(peer_id, *c);
                        }
                    }
                }
            }
            let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
            let share = frost::round2::sign(&signing_package, &signing_nonces, &key_pkg)?;
            let share_file = format!("p{}_share.json", hex::encode(key_pkg.identifier().serialize()));
            fs::write(&share_file, serde_json::to_string(&share)?)?;
            println!("✅ Share saved to {}", share_file);
        }
        Commands::Aggregate { message, shares } => {
            println!("🧬 Executing Aggregate...");
            let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string("group_public.json")?)?;
            let mut commitments_map = BTreeMap::new();
            let mut signature_shares = BTreeMap::new();
            for share_path in shares {
                let share: frost::round2::SignatureShare = serde_json::from_str(&fs::read_to_string(share_path)?)?;
                // Recover the participant id from the p{id}_share.json filename.
                let fname = std::path::Path::new(share_path).file_name().unwrap().to_str().unwrap();
                let id_hex = fname.strip_prefix('p').unwrap().strip_suffix("_share.json").unwrap();
                let peer_id: frost::Identifier = serde_json::from_str(&format!("\"{}\"", id_hex))?;
                let comms_file = format!("p{}_public_comms.json", id_hex);
                let peer_comms: CommitmentMap = serde_json::from_str(&fs::read_to_string(comms_file)?)?;
                // NOTE(review): this always uses commitment index 0, but Sign
                // consumes an arbitrary --index; aggregation can only succeed
                // when the shares were produced with index 0 — confirm intent.
                commitments_map.insert(peer_id, *peer_comms.get(&0).unwrap());
                signature_shares.insert(peer_id, share);
            }
            let signing_package = frost::SigningPackage::new(commitments_map, message.as_bytes());
            let group_sig = frost::aggregate(&signing_package, &signature_shares, &pubkey_package)?;
            let sig_hex = hex::encode(group_sig.serialize()?);
            println!("✅ Aggregation Successful!\nFinal BIP-340 Signature: {}", sig_hex);
            fs::write("final_signature.json", serde_json::to_string(&group_sig)?)?;
        }
        Commands::Verify { message, signature, public_key } => {
            println!("🔍 Executing Verify...");
            let pubkey_package: frost::keys::PublicKeyPackage = serde_json::from_str(&fs::read_to_string(public_key)?)?;
            let sig_bytes = hex::decode(signature)?;
            let group_sig = frost::Signature::deserialize(&sig_bytes)?;
            // Check the aggregated BIP-340 signature against the group key.
            match pubkey_package.verifying_key().verify(message.as_bytes(), &group_sig) {
                Ok(_) => println!("✅ SUCCESS: The signature is VALID!"),
                Err(_) => println!("❌ FAILURE: Invalid signature."),
            }
        }
    }
    Ok(())
}
use frost_secp256k1_tr as frost;
use frost::{Identifier, round1, round2};
use rand_chacha::ChaCha20Rng;
use rand_chacha::rand_core::SeedableRng;
use std::collections::BTreeMap;
use std::fs;
/// BIP-64MOD batch nonce management demo: pre-generates a batch of FROST
/// nonces, vaults them to disk, then fulfils a coordinator request for one
/// specific index and purges that nonce so it can never be reused.
///
/// Fix: the vault rewrite previously read `serde_json::to_string(¤t_vault)` —
/// a mangled HTML entity (`&curren;`) of `&current_vault` — which did not
/// compile. Restored the intended borrow.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 1. Load the persistent KeyPackage produced by the keygen example.
    let p1_json = fs::read_to_string("p1_key.json")?;
    let p1_key_pkg: frost::keys::KeyPackage = serde_json::from_str(&p1_json)?;
    let p1_id = *p1_key_pkg.identifier();
    println!("--- BIP-64MOD: Batch Nonce Management ---");
    // 2. BATCH GENERATION (the "public offer"). A fixed seed keeps the
    // example reproducible.
    let mut rng = ChaCha20Rng::from_seed([88u8; 32]);
    let mut public_commitments = BTreeMap::new();
    let mut secret_nonce_vault = BTreeMap::new();
    for i in 0..5 {
        let (nonces, commitments) = round1::commit(p1_key_pkg.signing_share(), &mut rng);
        public_commitments.insert(i, commitments);
        secret_nonce_vault.insert(i, nonces);
    }
    // Save the vault (private: it contains the secret nonces).
    fs::write("p1_batch_vault.json", serde_json::to_string(&secret_nonce_vault)?)?;
    println!("✅ Signer: Generated 5 nonces and saved to p1_batch_vault.json");
    // 3. COORDINATOR REQUEST (choosing index #3).
    let message = b"gnostr-gcc-batch-commit-hash-003";
    let selected_index: u64 = 3;
    let mut commitments_map = BTreeMap::new();
    // Coordinator uses P1's commitment at the specific index. Indexing is
    // safe here: indices 0..5 were just inserted above.
    commitments_map.insert(p1_id, public_commitments[&selected_index]);
    // Mock P2 to satisfy the 2-of-3 threshold.
    // NOTE(review): the mock reuses P1's signing share — acceptable for a
    // demo, but a real P2 would commit with its own share.
    let mock_p2_id = Identifier::try_from(2u16)?;
    let (_, p2_commitments) = round1::commit(p1_key_pkg.signing_share(), &mut rng);
    commitments_map.insert(mock_p2_id, p2_commitments);
    let signing_package = frost::SigningPackage::new(commitments_map, message);
    println!("\n🚀 Coordinator: Requesting signature for Index #{}", selected_index);
    // 4. SIGNER: selective fulfilment — reload the vault from disk.
    let mut current_vault: BTreeMap<u64, round1::SigningNonces> =
        serde_json::from_str(&fs::read_to_string("p1_batch_vault.json")?)?;
    // `remove` extracts the requested nonce AND guarantees single use.
    if let Some(p1_nonces) = current_vault.remove(&selected_index) {
        let p1_share = round2::sign(&signing_package, &p1_nonces, &p1_key_pkg)?;
        // Persist the updated vault (index 3 is now GONE).
        fs::write("p1_batch_vault.json", serde_json::to_string(&current_vault)?)?;
        println!("✅ Signer: Signed message using Index #{}", selected_index);
        println!("✅ Signer: Partial Signature: {}", hex::encode(p1_share.serialize()));
        println!("🛡️ Signer: Index #{} purged from vault. {} nonces remain.",
            selected_index, current_vault.len());
    } else {
        println!("❌ Error: Nonce index {} has already been used!", selected_index);
    }
    Ok(())
}
#[cfg(not(feature = "nostr"))]
// Fallback entry point when compiled without the `nostr` feature.
fn main() { println!("Enable nostr feature."); }
name: Rust
on:
push:
branches: [ "*" ]
pull_request:
branches: [ "*" ]
env:
CARGO_TERM_COLOR: always
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
RUST_LOG: info
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-15-intel, macos-latest, windows-latest]
features_args: ["", "--no-default-features", "--features nostr"]
steps:
- uses: actions/checkout@v4
- name: Install system deps (dbus)
if: runner.os == 'Linux'
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev
- name: Install protobuf (protoc)
shell: bash
run: |
set -euxo pipefail
if [[ "${{ runner.os }}" == "macOS" ]]; then
brew update
brew install protobuf
elif [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y protobuf-compiler
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install -y protoc
fi
- name: Build ${{ matrix.features_args }}
run: cargo build --workspace --verbose ${{ matrix.features_args }}
- name: Run workspace tests ${{ matrix.features_args }}
run: |
cargo test --workspace ${{ matrix.features_args }} -- --test-threads 1
- name: Run get_file_hash_core tests ${{ matrix.features_args }}
shell: bash
run: |
if [[ "${{ matrix.features_args }}" == "--features nostr" ]]; then
cargo test -p get_file_hash_core ${{ matrix.features_args }} -- --test-threads 1 --nocapture
else
cargo test -p get_file_hash_core ${{ matrix.features_args }} -- --test-threads 1
fi
- name: Run get_file_hash tests ${{ matrix.features_args }}
shell: bash
run: |
if [[ "${{ matrix.features_args }}" == "--features nostr" ]]; then
cargo test -p get_file_hash ${{ matrix.features_args }} -- --test-threads 1 --nocapture
else
cargo test -p get_file_hash ${{ matrix.features_args }} -- --test-threads 1
fi
- name: Build Release ${{ matrix.features_args }}
run: cargo build --workspace --release ${{ matrix.features_args }}
# `build.rs` Documentation
This document explains the functionality of the `build.rs` script in this project. The `build.rs` script is a special Rust file that, if present, Cargo will compile and run *before* compiling the rest of your package. It's typically used for tasks that need to be performed during the build process, such as generating code, setting environment variables, or performing conditional compilation.
## Core Functionality
The `build.rs` script in this project performs the following key functions:
1. **Environment Variable Injection:** It computes various project-related values at compile time and injects them as environment variables (via `cargo:rustc-env=VAR=VALUE` directives printed to stdout) that can be accessed by the main crate using `env!("VAR_NAME")`. This includes:
* `CARGO_PKG_NAME`: The name of the current package (from `Cargo.toml`).
* `CARGO_PKG_VERSION`: The version of the current package (from `Cargo.toml`).
* `GIT_COMMIT_HASH`: The full commit hash of the current Git HEAD (if in a Git repository).
* `GIT_BRANCH`: The name of the current Git branch (if in a Git repository).
* `CARGO_TOML_HASH`: The SHA-256 hash of the `Cargo.toml` file.
* `LIB_HASH`: The SHA-256 hash of the `src/lib.rs` file.
* `BUILD_HASH`: The SHA-256 hash of the `build.rs` file itself.
2. **Rerun Conditions:** It tells Cargo when to re-run the build script. This ensures that the injected environment variables and any conditional compilation logic are up-to-date if relevant files change:
* `Cargo.toml`
* `src/lib.rs`
* `build.rs`
* `.git/HEAD` (to detect changes in the Git repository like new commits or branch switches).
* `src/get_file_hash_core/src/online_relays_gps.csv` (conditionally, if the file exists).
3. **Conditional Nostr Event Publishing (Release Builds with `nostr` feature):**
If the project is being compiled in **release mode (`--release`)** and the **`nostr` feature is enabled (`--features nostr`)**, the `build.rs` script will connect to Nostr relays and publish events. This is intended for "deterministic Nostr event build examples" as indicated by the comments in the file.
* **Relay Management:** It retrieves a list of default relay URLs. During event publishing, it identifies and removes "unfriendly" or unresponsive relays (e.g., those with timeout, connection issues, or spam blocks) from the list for subsequent publications.
* **File Hashing and Key Generation:** For each Git-tracked file (when in a Git repository), it computes its SHA-256 hash. This hash is then used to derive a Nostr `SecretKey`.
* **Event Creation:**
* **Individual File Events:** For each Git-tracked file, a Nostr `text_note` event is created. This event includes tags for:
* `#file`: The path of the file.
* `#version`: The package version.
* `#commit`: The Git commit hash (if in a Git repository).
* `#branch`: The Git branch name (if in a Git repository).
* **Metadata Event:** It publishes a metadata event using `get_file_hash_core::publish_metadata_event`.
* **Linking Event (Build Manifest):** After processing all individual files, if any events were published, a final "build manifest" `text_note` event is created. This event links to all the individual file events that were published during the build using event tags.
* **Output Storage:** The JSON representation of successfully published Nostr events (specifically the `EventId`) is saved to `~/.gnostr/build/{package_version}/{file_path_str_sanitized}/{hash}/{public_key}/{event_id}.json`. This provides a local record of what was published.
### `publish_nostr_event_if_release` Function
This asynchronous helper function is responsible for:
* Adding relays to the Nostr client.
* Connecting to relays.
* Signing the provided `EventBuilder` to create an `Event`.
* Sending the event to the configured relays.
* Logging success or failure for each relay.
* Identifying and removing unresponsive relays from the `relay_urls` list.
* Saving the published event's JSON to the local filesystem.
### `should_remove_relay` Function
This helper function determines if a relay should be considered "unfriendly" or unresponsive based on common error messages received during Nostr event publication.
## Usage
To prevent 'Too many open files' errors, especially during builds and tests involving numerous file operations or subprocesses (like `git ls-files` or parallel test execution), it may be necessary to increase the file descriptor limit.
* **For local development**: Run `ulimit -n 4096` in your terminal session before executing `cargo build` or `cargo test`. This setting is session-specific.
* **For CI environments**: The `.github/workflows/rust.yml` workflow is configured to set `ulimit -n 4096` for relevant test steps to ensure consistent execution.
The values set by `build.rs` can be accessed in your Rust code (e.g., `src/lib.rs`) at compile time using the `env!` macro. For example:
```rust
pub const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
```
The Nostr event publishing functionality of `build.rs` is primarily for release builds with the `nostr` feature enabled, allowing for the automatic, deterministic publication of project state to the Nostr network as part of the CI/CD pipeline.
## Example Commands
To interact with the `build.rs` script's features, especially those related to Nostr event publishing, you can use the following `cargo` commands:
* **Build in release mode with Nostr feature (verbose output):**
```bash
cargo build --release --workspace --features nostr -vv
```
* **Run tests for `get_file_hash_core` sequentially with Nostr feature and verbose logging (as in CI):**
```bash
RUST_LOG=info,nostr_sdk=debug,frost=debug cargo test -p get_file_hash_core --features nostr -- --test-threads 1 --nocapture
```
* **Run all workspace tests in release mode with Nostr feature:**
```bash
cargo test --workspace --release --features nostr
```
* **Build `get_file_hash_core` in release mode with Nostr feature (very verbose output):**
```bash
cargo build --release --features nostr -vv -p get_file_hash_core
```
* **Run `get_file_hash_core` tests in release mode with Nostr feature (very verbose output):**
```bash
cargo test --release --features nostr -vv -p get_file_hash_core
```
use frost_secp256k1_tr as frost;
use frost::{Identifier, round1, round2};
use rand_chacha::ChaCha20Rng;
use rand_chacha::rand_core::SeedableRng;
use std::fs;
use std::collections::BTreeMap;
/// Example 7: one full FROST signing round-trip from a single signer's view,
/// persisting the secret nonce between Round 1 and Round 2 to simulate an
/// asynchronous, distributed signing session (Coordinator <-> Signer).
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 1. SETUP: Reload the KeyPackage we saved in the last example.
    // Fails with a helpful message if example 6 was never run.
    let p1_json = fs::read_to_string("p1_key.json")
        .map_err(|_| "Run example 6 first to generate p1_key.json")?;
    let p1_key_pkg: frost::keys::KeyPackage = serde_json::from_str(&p1_json)?;
    let p1_id = *p1_key_pkg.identifier();
    println!("--- BIP-64MOD: Distributed Handshake Simulation ---");
    // 2. SIGNER: Round 1 (Generate and Vault)
    // In a real app, the Signer does this and sends the Commitment to a Nostr Relay.
    // NOTE(review): the fixed seed makes the demo deterministic; a production
    // signer must draw a fresh CSPRNG seed per session.
    let mut rng = ChaCha20Rng::from_seed([42u8; 32]);
    let (p1_nonces, p1_commitments) = round1::commit(p1_key_pkg.signing_share(), &mut rng);
    // Securely "vault" the secret nonces (Simulating a local DB or protected file)
    let nonce_json = serde_json::to_string(&p1_nonces)?;
    fs::write("p1_nonce_vault.json", nonce_json)?;
    println!("✅ Signer: Generated Nonce and saved to p1_nonce_vault.json");
    println!("✅ Signer: Shared Public Commitment: {}", hex::encode(p1_commitments.serialize()?));
    // 3. COORDINATOR: Create Signing Request
    // The Coordinator sees the commitment and asks the group to sign a Git Commit.
    let message = b"gnostr-gcc-distributed-commit-xyz123";
    let mut commitments_map = BTreeMap::new();
    commitments_map.insert(p1_id, p1_commitments);
    // We mock P2's commitment here to satisfy the 2-of-3 threshold.
    // NOTE(review): the mock reuses P1's signing share, so an aggregated group
    // signature would NOT verify — acceptable only because this example stops
    // after producing P1's signature share and never aggregates.
    let mock_p2_id = Identifier::try_from(2u16)?;
    let mut rng2 = ChaCha20Rng::from_seed([7u8; 32]);
    let (_, p2_commitments) = round1::commit(p1_key_pkg.signing_share(), &mut rng2); // Mocking
    commitments_map.insert(mock_p2_id, p2_commitments);
    let signing_package = frost::SigningPackage::new(commitments_map, message);
    println!("\n🚀 Coordinator: Created Signing Request for message: {:?}",
    String::from_utf8_lossy(message));
    // 4. SIGNER: Round 2 (Fulfill Request)
    // Signer receives the SigningPackage, reloads their secret nonce, and signs.
    let vaulted_nonce_json = fs::read_to_string("p1_nonce_vault.json")?;
    let p1_reloaded_nonces: round1::SigningNonces = serde_json::from_str(&vaulted_nonce_json)?;
    let p1_share = round2::sign(&signing_package, &p1_reloaded_nonces, &p1_key_pkg)?;
    println!("✅ Signer: Fulfilled request with Signature Share: {}",
    hex::encode(p1_share.serialize()));
    // IMPORTANT: Delete the secret nonce after use to prevent reuse attacks!
    // (Nonce reuse in FROST/Schnorr schemes leaks the signing share.)
    fs::remove_file("p1_nonce_vault.json")?;
    println!("🛡️ Signer: Secret nonce deleted from vault (Reuse Protection).");
    Ok(())
}
/// Fallback entry point when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() { println!("Enable nostr feature."); }
[workspace]
members = ["cargo:."]
# Config for 'dist'
[dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.30.3"
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell", "homebrew", "msi"]
# A GitHub repo to push Homebrew formulas to
tap = "gnostr-org/homebrew-gnostr-org"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
# Path that installers should place binaries in
install-path = "CARGO_HOME"
# Publish jobs to run in CI
publish-jobs = ["homebrew"]
# Whether to install an updater program
install-updater = true
# Skip checking whether the specified configuration files are up to date
allow-dirty = ["ci"]
use frost_secp256k1_tr as frost;
use frost::{Identifier, keys::IdentifierList, round1, round2};
use rand_chacha::ChaCha20Rng;
use rand_chacha::rand_core::SeedableRng;
use std::fs;
/// Example 6: trusted-dealer FROST key generation, JSON persistence to disk,
/// and reuse of a reloaded KeyPackage for a fresh Round-1 commitment.
#[cfg(feature = "nostr")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 1. SETUP: Initial key generation (the "Genesis" event). The fixed seed
    // keeps the demo reproducible.
    let min_signers = 2;
    let mut dealer_rng = ChaCha20Rng::from_seed([0u8; 32]);
    let (shares, pubkey_package) = frost::keys::generate_with_dealer(
        3,
        min_signers,
        IdentifierList::Default,
        &mut dealer_rng,
    )?;

    // 2. PERSISTENCE: assemble Participant 1's KeyPackage and write it out
    // as pretty JSON (standard for many Nostr/Git tools).
    let p1_id = Identifier::try_from(1u16)?;
    let p1_signing_share = *shares[&p1_id].signing_share();
    let p1_key_pkg = frost::keys::KeyPackage::new(
        p1_id,
        p1_signing_share,
        frost::keys::VerifyingShare::from(p1_signing_share),
        *pubkey_package.verifying_key(),
        min_signers,
    );
    fs::write("p1_key.json", serde_json::to_string_pretty(&p1_key_pkg)?)?;
    fs::write("group_public.json", serde_json::to_string_pretty(&pubkey_package)?)?;
    println!("--- BIP-64MOD: Key Persistence ---");
    println!("✅ Saved p1_key.json and group_public.json to disk.");

    // 3. RELOAD: simulate a signer process waking up later and reading its
    // key material back from disk.
    let reloaded_pkg: frost::keys::KeyPackage =
        serde_json::from_str(&fs::read_to_string("p1_key.json")?)?;
    println!("✅ Reloaded KeyPackage for Participant: {:?}", reloaded_pkg.identifier());

    // 4. SIGN: derive a fresh Round-1 commitment from the reloaded key.
    let mut session_rng = ChaCha20Rng::from_seed([100u8; 32]); // Fresh seed for this specific signing session
    let (_session_nonces, session_commitments) =
        round1::commit(reloaded_pkg.signing_share(), &mut session_rng);
    println!("\nGenerated Nonce for new session:");
    println!(" Commitment: {}", hex::encode(session_commitments.serialize()?));
    // Cleanup of p1_key.json / group_public.json is intentionally left to the
    // caller — example 7 consumes p1_key.json.
    Ok(())
}
/// Fallback entry point when the `nostr` feature is disabled.
#[cfg(not(feature = "nostr"))]
fn main() {
    println!("Enable nostr feature.");
}
# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.
name: Release
permissions:
"contents": "write"
# This task will run whenever you push a git tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
pull_request:
push:
tags:
- '**[0-9]+.[0-9]+.[0-9]+*'
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
install-deps:
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
publishing: ${{ !github.event.pull_request }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
plan:
needs:
- install-deps
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
publishing: ${{ !github.event.pull_request }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.3/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
# Build and packages all the platform-specific things
build-local-artifacts:
name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
# Let the initial task tell us to not run (currently very blunt)
needs:
- plan
- install-deps
if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
strategy:
fail-fast: false
# Target platforms/runners are computed by dist in create-release.
# Each member of the matrix has the following arguments:
#
# - runner: the github runner
# - dist-args: cli flags to pass to dist
# - install-dist: expression to run to install dist on the runner
#
# Typically there will be:
# - 1 "global" task that builds universal installers
# - N "local" tasks that build each platform's binaries and platform-specific installers
matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
runs-on: ${{ matrix.runner }}
container: ${{ matrix.container && matrix.container.image || null }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
steps:
- name: enable windows longpaths
run: |
git config --global core.longpaths true
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install Rust non-interactively if not already installed
if: ${{ matrix.container }}
run: |
if ! command -v cargo > /dev/null 2>&1; then
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
fi
- name: Install dist
run: ${{ matrix.install_dist.run }}
# Get the dist-manifest
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- name: Install dependencies
run: |
${{ matrix.packages_install }}
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
- name: Configure Cargo for Windows builds
if: runner.os == 'Windows'
shell: bash
run: |
# Put target dir somewhere short and less likely to be locked
echo "CARGO_TARGET_DIR=D:/cargo-target" >> $GITHUB_ENV
# Reduce file-handle pressure / parallel writes
echo "CARGO_BUILD_JOBS=2" >> $GITHUB_ENV
# Avoid incremental artifacts (less churn, fewer locks)
echo "CARGO_INCREMENTAL=0" >> $GITHUB_ENV
- name: Build artifacts
run: |
# Actually do builds and make zips and whatnot
dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
echo "dist ran successfully"
- id: cargo-dist
name: Post-build
# We force bash here just because github makes it really hard to get values up
# to "real" actions without writing to env-vars, and writing to env-vars has
# inconsistent syntax between shell and powershell.
shell: bash
run: |
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@v4
with:
name: artifacts-build-local-${{ join(matrix.targets, '_') }}
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- install-deps
- build-local-artifacts
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
- name: Install-deps
shell: bash
run: |
if [[ "${{ runner.os }}" == "Linux" ]]; then
sudo apt-get update
sudo apt-get install -y pkg-config libdbus-1-dev curl unzip
# Install a pinned modern protoc
PROTOC_ZIP=protoc-25.3-linux-x86_64.zip
curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v25.3/$PROTOC_ZIP
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
rm -f $PROTOC_ZIP
elif [[ "${{ runner.os }}" == "macOS" ]]; then
brew install protobuf
elif [[ "${{ runner.os }}" == "Windows" ]]; then
choco install protoc --no-progress
echo "C:\ProgramData\chocolatey\bin" >> $GITHUB_PATH
fi
protoc --version
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: cargo-dist
shell: bash
run: |
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
uses: actions/upload-artifact@v4
with:
name: artifacts-build-global
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Determines if we should publish/announce
host:
needs:
- plan
- install-deps
- build-local-artifacts
- build-global-artifacts
# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "ubuntu-latest"
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: host
shell: bash
run: |
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
uses: actions/upload-artifact@v4
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
path: dist-manifest.json
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: artifacts
merge-multiple: true
- name: Cleanup
run: |
# Remove the granular manifests
rm -f artifacts/*-dist-manifest.json
- name: Create GitHub Release
env:
PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}"
ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}"
ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}"
RELEASE_COMMIT: "${{ github.sha }}"
run: |
# Write and read notes from a file to avoid quoting breaking things
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
publish-homebrew-formula:
needs:
- plan
- host
- install-deps
- build-local-artifacts
- build-global-artifacts
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PLAN: ${{ needs.plan.outputs.val }}
GITHUB_USER: "axo bot"
GITHUB_EMAIL: "[email protected]"
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: true
repository: "gnostr-org/homebrew-gnostr"
token: ${{ secrets.HOMEBREW_TAP_TOKEN }}
# So we have access to the formula
- name: Fetch homebrew formulae
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: Formula/
merge-multiple: true
# This is extra complex because you can make your Formula name not match your app name
# so we need to find releases with a *.rb file, and publish with that filename.
- name: Commit formula files
run: |
git config --global user.name "${GITHUB_USER}"
git config --global user.email "${GITHUB_EMAIL}"
for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do
filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output)
name=$(echo "$filename" | sed "s/\.rb$//")
version=$(echo "$release" | jq .app_version --raw-output)
export PATH="/home/linuxbrew/.linuxbrew/bin:$PATH"
brew update
# We avoid reformatting user-provided data such as the app description and homepage.
brew style --except-cops FormulaAudit/Homepage,FormulaAudit/Desc,FormulaAuditStrict --fix "Formula/${filename}" || true
git add "Formula/${filename}"
git commit -m "${name} ${version}"
done
git push
announce:
needs:
- plan
- install-deps
- build-local-artifacts
- build-global-artifacts
- host
- publish-homebrew-formula
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }}
runs-on: "ubuntu-latest"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
/// deterministic nostr event build example
// deterministic nostr event build example
use get_file_hash_core::get_file_hash;
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use get_file_hash_core::{get_git_tracked_files, DEFAULT_GNOSTR_KEY, DEFAULT_PICTURE_URL, DEFAULT_BANNER_URL, publish_nostr_event_if_release, get_repo_announcement_event};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use nostr_sdk::{EventBuilder, Keys, Tag, SecretKey};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use std::fs;
use std::path::PathBuf;
use sha2::{Digest, Sha256};
#[cfg(all(not(debug_assertions), feature = "nostr"))]
use ::hex;
/// Runs protoc codegen for the relay plugin protos (server + client +
/// transport), only when the `gen-protos` feature is enabled.
#[cfg(feature = "gen-protos")]
fn compile_protos() {
    let codegen = tonic_prost_build::configure()
        .build_server(true)
        .build_client(true)
        .build_transport(true)
        .protoc_arg("--experimental_allow_proto3_optional");
    codegen
        .compile_protos(&["n34-relay/proto/plugins.proto"], &["n34-relay/proto"])
        .expect("protoc is required");
}
/// No-op stand-in so `main` can call `compile_protos()` unconditionally.
#[cfg(not(feature = "gen-protos"))]
fn compile_protos() {}
/// Build script entry point: emits `cargo:rustc-env` metadata (package name /
/// version, git commit + branch, content hashes of build inputs) and, in
/// release builds with the `nostr` feature, publishes tracked files plus a
/// build manifest and a NIP-34 repository announcement to Nostr relays.
#[tokio::main]
async fn main() {
    compile_protos();
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let is_git_repo = std::path::Path::new(&manifest_dir).join(".git").exists();
    // BUGFIX: this used to be declared only under
    // `#[cfg(all(not(debug_assertions), feature = "nostr"))]`, and the branch
    // detected inside `if is_git_repo` was bound to a *shadowing* inner `let`,
    // so the release-only Nostr block at the bottom always announced an empty
    // branch. Declare once, unconditionally, and assign (not shadow) below.
    let mut git_branch_str = String::new();
    println!("cargo:rustc-env=CARGO_PKG_NAME={}", env!("CARGO_PKG_NAME"));
    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", env!("CARGO_PKG_VERSION"));
    if is_git_repo {
        // Embed the current commit hash; falls back to an empty string (with a
        // cargo warning) rather than failing the build.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Embed the current branch name, same fallback policy as above.
        let git_branch_output = std::process::Command::new("git")
            .args(&["rev-parse", "--abbrev-ref", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for branch name");
        // Assignment, not `let`: keeps the value visible to the Nostr block.
        git_branch_str = if git_branch_output.status.success() && !git_branch_output.stdout.is_empty() {
            String::from_utf8(git_branch_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git branch command failed or returned empty. Status: {:?}, Stderr: {}",
                git_branch_output.status, String::from_utf8_lossy(&git_branch_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_BRANCH={}", git_branch_str);
    } else {
        // Not a git checkout (e.g. a crates.io tarball): export empty values.
        println!("cargo:rustc-env=GIT_COMMIT_HASH=");
        println!("cargo:rustc-env=GIT_BRANCH=");
    }
    println!("cargo:rerun-if-changed=.git/HEAD");
    // Deterministic content hashes of the primary build inputs, exposed to
    // the crate at compile time via env!().
    let cargo_toml_hash = get_file_hash!("Cargo.toml");
    println!("cargo:rustc-env=CARGO_TOML_HASH={}", cargo_toml_hash);
    let lib_hash = get_file_hash!("src/lib.rs");
    println!("cargo:rustc-env=LIB_HASH={}", lib_hash);
    let build_hash = get_file_hash!("build.rs");
    println!("cargo:rustc-env=BUILD_HASH={}", build_hash);
    println!("cargo:rerun-if-changed=Cargo.toml");
    println!("cargo:rerun-if-changed=src/lib.rs");
    println!("cargo:rerun-if-changed=build.rs");
    let online_relays_csv_path = PathBuf::from(&manifest_dir).join("src/get_file_hash_core/src/online_relays_gps.csv");
    if online_relays_csv_path.exists() {
        println!("cargo:rerun-if-changed={}", online_relays_csv_path.to_str().unwrap());
    }
    // Release-only, `nostr`-feature-only publication. The inner
    // `cfg!(not(debug_assertions))` is redundant with the attribute but kept
    // as a belt-and-braces guard.
    #[cfg(all(not(debug_assertions), feature = "nostr"))]
    if cfg!(not(debug_assertions)) {
        println!("cargo:warning=Nostr feature enabled: Build may take longer due to network operations (publishing events to relays).");
        // This code only runs in release builds
        let package_version = std::env::var("CARGO_PKG_VERSION").unwrap();
        let output_dir = PathBuf::from(format!(".gnostr/build/{}", package_version));
        if let Err(e) = fs::create_dir_all(&output_dir) {
            println!("cargo:warning=Failed to create output directory {}: {}", output_dir.display(), e);
        }
        let files_to_publish: Vec<String> = get_git_tracked_files(&PathBuf::from(&manifest_dir));
        // NOTE(review): re-runs `git rev-parse HEAD` (and re-emits
        // GIT_COMMIT_HASH) even though the block above may already have done
        // so; kept to preserve behavior when `is_git_repo` was false.
        let git_commit_hash_output = std::process::Command::new("git")
            .args(&["rev-parse", "HEAD"])
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .output()
            .expect("Failed to execute git command for commit hash");
        let git_commit_hash_str = if git_commit_hash_output.status.success() && !git_commit_hash_output.stdout.is_empty() {
            String::from_utf8(git_commit_hash_output.stdout).unwrap().trim().to_string()
        } else {
            println!("cargo:warning=Git commit hash command failed or returned empty. Status: {:?}, Stderr: {}",
                git_commit_hash_output.status, String::from_utf8_lossy(&git_commit_hash_output.stderr));
            String::new()
        };
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash_str);
        // Left-pad the 40-hex-char SHA-1 to 64 hex chars (32 bytes) so it can
        // be parsed as a Nostr secret key.
        let padded_commit_hash = format!("{:0>64}", &git_commit_hash_str);
        println!("cargo:rustc-env=PADDED_COMMIT_HASH={}", padded_commit_hash);
        // Initialize client and keys once; the per-commit key makes the build
        // identity deterministic for a given commit.
        let initial_secret_key = SecretKey::parse(&padded_commit_hash).expect("Failed to create Nostr SecretKey from PADDED_COMMIT_HASH");
        let initial_keys = Keys::new(initial_secret_key);
        let mut client = nostr_sdk::Client::new(initial_keys.clone());
        let mut relay_urls = get_file_hash_core::get_relay_urls();
        // Best-effort relay registration: a bad relay URL only warns.
        for relay_url in relay_urls.iter() {
            if let Err(e) = client.add_relay(relay_url).await {
                println!("cargo:warning=Failed to add relay {}: {}", relay_url, e);
            }
        }
        client.connect().await;
        println!("cargo:warning=Added and connected to {} relays.", relay_urls.len());
        let mut published_event_ids: Vec<Tag> = Vec::new();
        let mut total_bytes_sent: usize = 0;
        for file_path_str in &files_to_publish {
            println!("cargo:warning=Processing file: {}", file_path_str);
            match fs::read(file_path_str) {
                Ok(bytes) => {
                    // Derive a deterministic per-file key from the file's
                    // SHA-256 digest: same content -> same event identity.
                    let file_hash_hex = hex::encode(Sha256::digest(&bytes));
                    match SecretKey::from_hex(&file_hash_hex) {
                        Ok(secret_key) => {
                            let keys = Keys::new(secret_key);
                            let content = String::from_utf8_lossy(&bytes).into_owned();
                            let tags = vec![
                                Tag::parse(["file", file_path_str].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                                Tag::parse(["version", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
                            ];
                            let event_builder = EventBuilder::text_note(content).tags(tags);
                            // Collect the published event id so the manifest
                            // below can reference every file event.
                            if let Some(event_id) = publish_nostr_event_if_release(&mut client, file_hash_hex, keys.clone(), event_builder, &mut relay_urls, file_path_str, &output_dir, &mut total_bytes_sent).await {
                                published_event_ids.push(Tag::event(event_id));
                            }
                            // Publish a profile/metadata event for the per-file key.
                            get_file_hash_core::publish_metadata_event(
                                &keys,
                                &relay_urls,
                                DEFAULT_PICTURE_URL,
                                DEFAULT_BANNER_URL,
                                file_path_str,
                            ).await;
                        }
                        Err(e) => {
                            println!("cargo:warning=Failed to derive Nostr secret key for {}: {}", file_path_str, e);
                        }
                    }
                }
                Err(e) => {
                    println!("cargo:warning=Failed to read file {}: {}", file_path_str, e);
                }
            }
        }
        // Create and publish the build_manifest
        if !published_event_ids.is_empty() {
            //TODO this will be either the default or detected from env vars PRIVATE_KEY
            let keys = Keys::new(SecretKey::from_hex(DEFAULT_GNOSTR_KEY).expect("Failed to create Nostr keys from DEFAULT_GNOSTR_KEY"));
            let cloned_keys = keys.clone();
            let content = format!("Build manifest for get_file_hash v{}", package_version);
            // BUGFIX: the manifest previously carried four identical
            // ["build_manifest", version] tags (copy-paste); one is enough.
            let mut tags = vec![
                Tag::parse(["build_manifest", &package_version].iter().map(ToString::to_string).collect::<Vec<String>>()).unwrap(),
            ];
            tags.extend(published_event_ids);
            let event_builder = EventBuilder::text_note(content.clone()).tags(tags);
            if let Some(event_id) = publish_nostr_event_if_release(
                &mut client,
                hex::encode(Sha256::digest(content.as_bytes())),
                keys,
                event_builder,
                &mut relay_urls,
                "build_manifest.json",
                &output_dir,
                &mut total_bytes_sent,
            ).await {
                // Publish metadata event for the build manifest
                get_file_hash_core::publish_metadata_event(
                    &cloned_keys, // Use reference to cloned keys here
                    &relay_urls,
                    DEFAULT_PICTURE_URL,
                    DEFAULT_BANNER_URL,
                    &format!("build_manifest:{}", package_version),
                ).await;
                let git_commit_hash = &git_commit_hash_str;
                // After the shadowing fix above, this is the real detected
                // branch (it was always "" before).
                let git_branch = &git_branch_str;
                let repo_url = std::env::var("CARGO_PKG_REPOSITORY").unwrap();
                let repo_name = std::env::var("CARGO_PKG_NAME").unwrap();
                let repo_description = std::env::var("CARGO_PKG_DESCRIPTION").unwrap();
                // The announcement identity is derived from the manifest
                // event id, chaining the announcement to this exact build.
                // (The former Option wrapper + unwrap around event_id, and the
                // redundant re-creation of output_dir, were removed.)
                let announcement_keys = Keys::new(SecretKey::from_hex(event_id.to_hex().as_str()).expect("Failed to create Nostr keys from build_manifest_event_id"));
                let announcement_pubkey_hex = announcement_keys.public_key().to_string();
                // Publish NIP-34 Repository Announcement
                if let Some(_event_id) = get_repo_announcement_event(
                    &mut client,
                    &announcement_keys,
                    &relay_urls,
                    &repo_url,
                    &repo_name,
                    &repo_description,
                    &git_commit_hash,
                    &git_branch,
                    &output_dir,
                    &announcement_pubkey_hex
                ).await {
                    // Successfully published announcement
                }
            }
        }
        println!("cargo:warning=Total bytes sent to Nostr relays: {} bytes ({} MB)", total_bytes_sent, total_bytes_sent as f64 / 1024.0 / 1024.0);
    }
}
// deterministic nostr event build example